# Copyright (C) 2005 Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from StringIO import StringIO

import bzrlib
from bzrlib import (
    errors,
    osutils,
    progress,
    )
from bzrlib.errors import (
                           RevisionNotPresent,
                           RevisionAlreadyPresent,
                           WeaveParentMismatch
                           )
from bzrlib.knit import KnitVersionedFile, \
     KnitAnnotateFactory
from bzrlib.tests import TestCaseWithTransport
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
import bzrlib.versionedfile as versionedfile
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


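# These tests exercise the abstract VersionedFile api: they are written once
# in VersionedFileTestMixIn below and run against the concrete implementations
# (WeaveFile via TestWeave, KnitVersionedFile via TestKnit), with InterString
# and TestInterVersionedFile covering the InterVersionedFile plumbing.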
class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations;
    they are strictly controlled by their owning repositories.
    """

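    # Concrete TestCase subclasses (TestWeave and TestKnit below) mix this
    # class in and provide get_file(), get_factory(), reopen_file() and the
    # get_file_corrupted_* helpers that these tests call.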
    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                                                         ['r0', 'ghost'],
                                                         ['b\n', 'c\n'],
                                                         parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            parent_texts['r1'] = f.add_lines('r1',
                                             ['r0'],
                                             ['b\n', 'c\n'],
                                             parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate('r1')
            self.assertEquals(origins[0][0], 'r0')
            self.assertEquals(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEquals(origins[0][0], 'r1')
            self.assertEquals(origins[1][0], 'r2')

        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

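    # Note: parent_texts above is filled from the return value of add_lines /
    # add_lines_with_ghosts and handed back on later calls; the test only
    # asserts that the returned entries are populated and accepted.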
    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_inline_newline_throws(self):
        # \n characters are not permitted within lines being added, only as
        # the trailing line terminator.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CRs are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

        self.assertRaises(errors.ReservedId,
            vf.add_delta, 'a:', [], None, 'sha1', False, ((0, 0, 0, []),))

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_delta, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

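    # Version ids like the 'a:' and 'b:' used above (ids ending in ':' appear
    # to be treated as reserved) must be refused with errors.ReservedId by
    # both the write and the read apis.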
    def test_get_delta(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, f.get_delta('base'))
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth],
                              False,
                              [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                              [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, f.get_delta(new_version))
            next_parent = new_version
        # smoke test for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, f.get_delta('noeol'))

    def test_get_deltas(self):
        f = self.get_file()
        sha1s = self._setup_for_deltas(f)
        deltas = f.get_deltas(f.versions())
        expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 0, 1, [('base', 'line\n')])])
        self.assertEqual(expected_delta, deltas['base'])
        next_parent = 'base'
        text_name = 'chain1-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth],
                              False,
                              [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        for depth in range(26):
            new_version = text_name + '%s' % depth
            expected_delta = (next_parent, sha1s[depth], False,
                              [(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
            self.assertEqual(expected_delta, deltas[new_version])
            next_parent = new_version
        # smoke tests for eol support
        expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
        self.assertEqual(['line'], f.get_lines('noeol'))
        self.assertEqual(expected_delta, deltas['noeol'])
        # smoke tests for eol support - two noeol in a row same content
        expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
                            [(0, 1, 2, [('noeolsecond', 'line\n'), ('noeolsecond', 'line\n')])]),
                           ('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
                            [(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
        self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
        self.assertTrue(deltas['noeolsecond'] in expected_deltas)
        # two no-eol in a row, different content
        expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
                          [(1, 2, 1, [('noeolnotshared', 'phone\n')])])
        self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
        self.assertEqual(expected_delta, deltas['noeolnotshared'])
        # eol following a no-eol with a content change
        expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
                          [(0, 1, 1, [('eol', 'phone\n')])])
        self.assertEqual(['phone\n'], f.get_lines('eol'))
        self.assertEqual(expected_delta, deltas['eol'])
        # eol following a no-eol with no content change
        expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
                          [(0, 1, 1, [('eolline', 'line\n')])])
        self.assertEqual(['line\n'], f.get_lines('eolline'))
        self.assertEqual(expected_delta, deltas['eolline'])
        # no-eol with no parents
        expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                          [(0, 0, 1, [('noeolbase', 'line\n')])])
        self.assertEqual(['line'], f.get_lines('noeolbase'))
        self.assertEqual(expected_delta, deltas['noeolbase'])
        # eol with two parents, in inverse insertion order
        expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                            [(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]),
                           ('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
                            [(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]))
        self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
        #self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)

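    # _setup_for_deltas (below) builds the fixture shared by the two delta
    # tests above: a 'base' version, a family of eol/no-eol variants, and two
    # parallel chains ('chain1-*' and 'chain2-*') long enough to exercise the
    # knit delta-chain threshold.  It returns the expected sha1 for each depth.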
    def _setup_for_deltas(self, f):
        self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

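    # The deltas used in these tests are (parent, sha1, noeol, hunks) tuples;
    # each hunk looks like (start, end, count, [(version, line), ...]).  See
    # the unpacking of source.get_delta(version) in test_add_delta below.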
    def test_add_delta(self):
        # tests for the add-delta facility.
        # at this point, optimising for speed, we assume no checks when deltas are inserted.
        # this may need to be revisited.
        source = self.get_file('source')
        source.add_lines('base', [], ['line\n'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            source.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            source.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        source.add_lines('noeol', ['base'], ['line'])

        target = self.get_file('target')
        for version in source.versions():
            parent, sha1, noeol, delta = source.get_delta(version)
            target.add_delta(version,
                             source.get_parents(version),
                             parent,
                             sha1,
                             noeol,
                             delta)
        self.assertRaises(RevisionAlreadyPresent,
                          target.add_delta, 'base', [], None, '', False, [])
        for version in source.versions():
            self.assertEqual(source.get_lines(version),
                             target.get_lines(version))

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM r3
        # r0 r1 r2 r3 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

    def test_mutate_after_finish(self):
        f = self.get_file()
        f.transaction_finished()
        self.assertRaises(errors.OutSideTransaction, f.add_delta, '', [], '', '', False, [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.fix_parents, '', [])
        self.assertRaises(errors.OutSideTransaction, f.join, '')
        self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])

    def test_clear_cache(self):
        f = self.get_file()
        # on a new file it should not error
        f.clear_cache()
        # and after adding content, doing a clear_cache and a get should work.
        f.add_lines('0', [], ['a'])
        f.clear_cache()
        self.assertEqual(['a'], f.get_lines('0'))

    def test_clone_text(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.clone_text('r1', 'r0', ['r0'])
        def verify_file(f):
            self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
            self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
            self.assertEquals(f.get_parents('r1'), ['r0'])

            self.assertRaises(RevisionNotPresent,
                f.clone_text, 'r2', 'rX', [])
            self.assertRaises(RevisionAlreadyPresent,
                f.clone_text, 'r1', 'r0', [])
        verify_file(f)
        verify_file(self.reopen_file())

    def test_create_empty(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        new_f = f.create_empty('t', MemoryTransport())
        # smoke test, specific types should check it is honoured correctly for
        # non-type attributes
        self.assertEqual([], new_f.versions())
        self.assertTrue(isinstance(new_f, f.__class__))

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in f.__class__.get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # should be the same
        self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
        # and should be a list
        self.assertTrue(isinstance(f.__class__.get_suffixes(), list))

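    # get_suffixes() is a class-level api; copy_to (above) is expected to have
    # produced one file per suffix at the destination transport.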
    def build_graph(self, file, graph):
        for node in topo_sort(graph.items()):
            file.add_lines(node, graph[node], [])

    def test_get_graph(self):
        f = self.get_file()
        graph = {
            'v1': [],
            'v2': ['v1'],
            'v3': ['v2']}
        self.build_graph(f, graph)
        self.assertEqual(graph, f.get_graph())

    def test_get_graph_partial(self):
        f = self.get_file()
        complex_graph = {}
        simple_a = {
            'c': [],
            'b': ['c'],
            'a': ['b'],
            }
        complex_graph.update(simple_a)
        simple_b = {
            'c': [],
            'b': ['c'],
            }
        complex_graph.update(simple_b)
        simple_gam = {
            'c': [],
            'oo': [],
            'bar': ['oo', 'c'],
            'gam': ['bar'],
            }
        complex_graph.update(simple_gam)
        simple_b_gam = {}
        simple_b_gam.update(simple_gam)
        simple_b_gam.update(simple_b)
        self.build_graph(f, complex_graph)
        self.assertEqual(simple_a, f.get_graph(['a']))
        self.assertEqual(simple_b, f.get_graph(['b']))
        self.assertEqual(simple_gam, f.get_graph(['gam']))
        self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))

    def test_get_parents(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', [], ['a\n', 'b\n'])
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEquals(f.get_parents('m'), ['r0', 'r1', 'r2', 'r3'])

        self.assertRaises(RevisionNotPresent,
            f.get_parents, 'y')

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_walk(self):
        # tests that walk returns all the inclusions for the requested
        # revisions as well as the revisions' own changes.
        f = self.get_file('1')
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        f.add_lines('rX', ['r1'], ['d\n', 'b\n'])
        f.add_lines('rY', ['r1'], ['c\n', 'e\n'])

        lines = {}
        for lineno, insert, dset, text in f.walk(['rX', 'rY']):
            lines[text] = (insert, dset)

        self.assertTrue(lines['a\n'], ('r0', set(['r1'])))
        self.assertTrue(lines['b\n'], ('r0', set(['rY'])))
        self.assertTrue(lines['c\n'], ('r1', set(['rX'])))
        self.assertTrue(lines['d\n'], ('rX', set([])))
        self.assertTrue(lines['e\n'], ('rY', set([])))

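    # walk() (used above) yields (lineno, insertion_version, deletion_set,
    # text) tuples; only the insertion and deletion origins are recorded here.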
    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):

                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {'base\n':0,
                     'lancestor\n':0,
                     'rancestor\n':0,
                     'child\n':0,
                     'otherchild\n':0,
                     }
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=progress):
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content.', 0, 2),
                                    ('Walking content.', 1, 2),
                                    ('Walking content.', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines['child\n'] > 0)
        self.assertTrue(lines['otherchild\n'] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content.', 0, 5),
                                          ('Walking content.', 1, 5),
                                          ('Walking content.', 2, 5),
                                          ('Walking content.', 3, 5),
                                          ('Walking content.', 4, 5),
                                          ('Walking content.', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines['base\n'] > 0)
        self.assertTrue(lines['lancestor\n'] > 0)
        self.assertTrue(lines['rancestor\n'] > 0)
        self.assertTrue(lines['child\n'] > 0)
        self.assertTrue(lines['otherchild\n'] > 0)

    def test_fix_parents(self):
        # some versioned files allow incorrect parents to be corrected after
        # insertion - this may not fix ancestry.
        # if they do not support it, they just do not implement it.
        # we test this as an interface test to ensure that those that *do*
        # implement it get it right.
        vf = self.get_file()
        vf.add_lines('notbase', [], [])
        vf.add_lines('base', [], [])
        try:
            vf.fix_parents('notbase', ['base'])
        except NotImplementedError:
            return
        self.assertEqual(['base'], vf.get_parents('notbase'))
        # open again, check it stuck.
        vf = self.get_file()
        self.assertEqual(['base'], vf.get_parents('notbase'))

    def test_fix_parents_with_ghosts(self):
        # when fixing parents, ghosts that are listed should not be ghosts
        # anymore.
        vf = self.get_file()

        try:
            vf.add_lines_with_ghosts('notbase', ['base', 'stillghost'], [])
        except NotImplementedError:
            return
        vf.add_lines('base', [], [])
        vf.fix_parents('notbase', ['base', 'stillghost'])
        self.assertEqual(['base'], vf.get_parents('notbase'))
        # open again, check it stuck.
        vf = self.get_file()
        self.assertEqual(['base'], vf.get_parents('notbase'))
        # and check the ghosts
        self.assertEqual(['base', 'stillghost'],
                         vf.get_parents_with_ghosts('notbase'))

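    # In the ghost tests here and below, a "ghost" is a parent that a version
    # names but that is not itself present in the file: the plain graph apis
    # hide such parents, while the *_with_ghosts apis report them.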
    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that refers to versions not present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, get_graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertEqual([], vf.get_parents('notbxbfse'))
        self.assertEqual({'notbxbfse':[]}, vf.get_graph())
        self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
                         vf.has_version, parent_id_unicode))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        self.assertEqual({'notbxbfse':[parent_id_utf8]}, vf.get_graph_with_ghosts())
        self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
                        vf.has_ghost, parent_id_unicode))
        self.assertTrue(vf.has_ghost(parent_id_utf8))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        self.callDeprecated([osutils._revision_id_warning],
                            vf.add_lines, parent_id_unicode, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents('notbxbfse'))
        self.assertEqual({parent_id_utf8:[],
                          'notbxbfse':[parent_id_utf8],
                          },
                         vf.get_graph())
        self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
                        vf.has_version, parent_id_unicode))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        self.assertEqual({parent_id_utf8:[],
                          'notbxbfse':[parent_id_utf8],
                          },
                         vf.get_graph_with_ghosts())
        self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
                         vf.has_ghost, parent_id_unicode))
        self.assertFalse(vf.has_ghost(parent_id_utf8))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that refers to versions not present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.has_ghost('hoo')
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_delta, '', [], '', '', False, [])
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])
        self.assertRaises(errors.ReadOnlyError, vf.fix_parents, 'base', [])
        self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
        self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])

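    # The read-only checks above rely on get_factory() accepting access_mode;
    # every mutating api must raise errors.ReadOnlyError on an 'r' mode file.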
    def test_get_sha1(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual(
            '3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
        self.assertEqual(
            '3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
        self.assertEqual(
            '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))


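# Concrete implementations of the mixin follow: TestWeave runs the interface
# tests against WeaveFile, TestKnit against KnitVersionedFile.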
class TestWeave(TestCaseWithTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')))

    def get_factory(self):
        return WeaveFile


| 
1563.2.12
by Robert Collins
 Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.  | 
830  | 
class TestKnit(TestCaseWithTransport, VersionedFileTestMixIn):  | 
| 
1563.2.4
by Robert Collins
 First cut at including the knit implementation of versioned_file.  | 
831  | 
|
832  | 
def get_file(self, name='foo'):  | 
|
| 
1563.2.16
by Robert Collins
 Change WeaveStore into VersionedFileStore and make its versoined file class parameterisable.  | 
833  | 
return KnitVersionedFile(name, get_transport(self.get_url('.')),  | 
| 
1563.2.25
by Robert Collins
 Merge in upstream.  | 
834  | 
delta=True, create=True)  | 
| 
1563.2.6
by Robert Collins
 Start check tests for knits (pending), and remove dead code.  | 
835  | 
|
| 
1594.2.23
by Robert Collins
 Test versioned file storage handling of clean/dirty status for accessed versioned files.  | 
836  | 
def get_factory(self):  | 
837  | 
return KnitVersionedFile  | 
|
838  | 
||
| 
1563.2.6
by Robert Collins
 Start check tests for knits (pending), and remove dead code.  | 
839  | 
def get_file_corrupted_text(self):  | 
840  | 
knit = self.get_file()  | 
|
841  | 
knit.add_lines('v1', [], ['hello\n'])  | 
|
842  | 
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])  | 
|
843  | 
return knit  | 
|
| 
1563.2.9
by Robert Collins
 Update versionedfile api tests to ensure that data is available after every operation.  | 
844  | 
|
| 
1666.1.6
by Robert Collins
 Make knit the default format.  | 
845  | 
def reopen_file(self, name='foo', create=False):  | 
846  | 
return KnitVersionedFile(name, get_transport(self.get_url('.')),  | 
|
847  | 
delta=True,  | 
|
848  | 
create=create)  | 
|
| 
1563.2.10
by Robert Collins
 Change weave store to be a versioned store, using WeaveFiles which maintain integrity without needing explicit 'put' operations.  | 
849  | 
|
850  | 
def test_detection(self):  | 
|
| 
1563.2.19
by Robert Collins
 stub out a check for knits.  | 
851  | 
knit = self.get_file()  | 
852  | 
knit.check()  | 
|
| 
1563.2.12
by Robert Collins
 Checkpointing: created InterObject to factor out common inter object worker code, added InterVersionedFile and tests to allow making join work between any versionedfile.  | 
853  | 
|
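    # Unlike the weave fixture above, this get_file_corrupted_text() returns
    # a knit whose stored texts are not actually tampered with, and
    # test_detection() only runs check() over a healthy file.
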
    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          KnitVersionedFile,
                          'foo',
                          get_transport(self.get_url('.')))


class InterString(versionedfile.InterVersionedFile):
    """An inter-versionedfile optimised code path for strings.

    This is for use during testing where we use strings as versionedfiles
    so that none of the default registered interversionedfile classes will
    match - which lets us test the match logic.
    """

    @staticmethod
    def is_compatible(source, target):
        """InterString is compatible with strings-as-versionedfiles."""
        return isinstance(source, str) and isinstance(target, str)


# TODO this and the InterRepository core logic should be consolidatable
# if we make the registry a separate class though we still need to
# test the behaviour in the active registry to catch failure-to-handle-
# strange-objects
class TestInterVersionedFile(TestCaseWithTransport):

    def test_get_default_inter_versionedfile(self):
        # test that InterVersionedFile.get(a, b) probes for a class whose
        # is_compatible(a, b) returns True, and falls back to the default
        # InterVersionedFile otherwise.
        # This also tests that the default registered optimised
        # interversionedfile classes do not barf inappropriately when a
        # surprising versionedfile type is handed to them.
        dummy_a = "VersionedFile 1."
        dummy_b = "VersionedFile 2."
        self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)

    def assertGetsDefaultInterVersionedFile(self, a, b):
        """Asserts that InterVersionedFile.get(a, b) -> the default."""
        inter = versionedfile.InterVersionedFile.get(a, b)
        self.assertEqual(versionedfile.InterVersionedFile,
                         inter.__class__)
        self.assertEqual(a, inter.source)
        self.assertEqual(b, inter.target)

    def test_register_inter_versionedfile_class(self):
        # test that an optimised code path provider - an InterVersionedFile
        # subclass - can be registered and unregistered, and that it is
        # correctly selected when given a versionedfile pair for which its
        # is_compatible static method returns True.
        dummy_a = "VersionedFile 1."
        dummy_b = "VersionedFile 2."
        versionedfile.InterVersionedFile.register_optimiser(InterString)
        try:
            # we should get the default for a pair InterString returns
            # False to
            self.assertFalse(InterString.is_compatible(dummy_a, None))
            self.assertGetsDefaultInterVersionedFile(dummy_a, None)
            # and we should get an InterString for a pair it 'likes'
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
            inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
            self.assertEqual(InterString, inter.__class__)
            self.assertEqual(dummy_a, inter.source)
            self.assertEqual(dummy_b, inter.target)
        finally:
            versionedfile.InterVersionedFile.unregister_optimiser(InterString)
        # now we should get the default InterVersionedFile object again.
        self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
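

# An illustrative sketch (not part of the original suite) of how the same
# registration API exercised above could host a real optimiser; the class
# name and body below are hypothetical, and the class is deliberately left
# unregistered here.
class _ExampleKnitOptimiser(versionedfile.InterVersionedFile):
    """Hypothetical optimiser that matches knit-to-knit pairs only."""

    @staticmethod
    def is_compatible(source, target):
        # Mirrors InterString.is_compatible, but probes for KnitVersionedFile
        # instances rather than strings.
        return (isinstance(source, KnitVersionedFile) and
                isinstance(target, KnitVersionedFile))

# A caller would enable it with
# versionedfile.InterVersionedFile.register_optimiser(_ExampleKnitOptimiser)
# and disable it again with unregister_optimiser(), exactly as the test above
# does for InterString.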


class TestReadonlyHttpMixin(object):

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)

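    # get_url() above points at the local writable transport the file was
    # created on, while in the concrete subclasses below (which derive from
    # TestCaseWithWebserver) get_readonly_url() serves the same directory
    # read-only over the test HTTP server - so readonly_vf really is read
    # through an http transport.
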

class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True)

    def get_factory(self):
        return WeaveFile


class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return KnitVersionedFile('foo', get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def get_factory(self):
        return KnitVersionedFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

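    # doMerge() stores base as 'text0' with 'text1'/'text2' as the two sides,
    # asks plan_merge() for a merge plan - a sequence of (state, line) pairs
    # such as ('new-a', 'hello\n') - feeds that plan to weave_merge(), and
    # compares the merged lines (newlines appended via addcrlf) against the
    # expected list mp.
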

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']


    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])


    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more."""
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestKnitMerge(TestCaseWithTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return KnitVersionedFile(name, get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def log_contents(self, w):
        pass


class TestWeaveMerge(TestCaseWithTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
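
# TestKnitMerge inherits overlappedInsertExpected from MergeCasesMixin (the
# clean ['aaa', 'xxx', 'yyy', 'bbb']), while TestWeaveMerge overrides it above
# with a conflicted expectation, reflecting that the weave-based merge still
# reports a conflict for the overlapped-insert case.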