bzr branch
http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
|
6614.1.3
by Vincent Ladeuil
Fix assertEquals being deprecated by using assertEqual. |
1 |
# Copyright (C) 2006-2012, 2016 Canonical Ltd
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
2 |
#
|
3 |
# Authors:
|
|
4 |
# Johan Rydberg <jrydberg@gnu.org>
|
|
5 |
#
|
|
6 |
# This program is free software; you can redistribute it and/or modify
|
|
7 |
# it under the terms of the GNU General Public License as published by
|
|
8 |
# the Free Software Foundation; either version 2 of the License, or
|
|
9 |
# (at your option) any later version.
|
|
|
1887.1.1
by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines, |
10 |
#
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
11 |
# This program is distributed in the hope that it will be useful,
|
12 |
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
13 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
14 |
# GNU General Public License for more details.
|
|
|
1887.1.1
by Adeodato Simó
Do not separate paragraphs in the copyright statement with blank lines, |
15 |
#
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
16 |
# You should have received a copy of the GNU General Public License
|
17 |
# along with this program; if not, write to the Free Software
|
|
|
4183.7.1
by Sabin Iacob
update FSF mailing address |
18 |
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
19 |
|
20 |
||
|
1704.2.15
by Martin Pool
Remove TODO about knit testing printed from test suite |
21 |
# TODO: might be nice to create a versionedfile with some type of corruption
|
22 |
# considered typical and check that it can be detected/corrected.
|
|
23 |
||
|
5590.1.1
by John Arbash Meinel
Stop using tuned_gzip, it seems to give incorrect results on python 2.7 |
24 |
from gzip import GzipFile |
|
6631.2.3
by Martin
Fix per_versionedfile test failures and rethink future_builtins |
25 |
import itertools |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
26 |
|
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
27 |
from .. import ( |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
28 |
errors, |
|
4593.5.35
by John Arbash Meinel
Start working on a per-vf implementation test of find_ancestry. |
29 |
graph as _mod_graph, |
|
2309.4.7
by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode, |
30 |
osutils, |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
31 |
progress, |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
32 |
transport, |
|
4332.3.26
by Robert Collins
Allow passing keys to check to VersionedFile.check(). |
33 |
ui, |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
34 |
)
|
|
6670.4.1
by Jelmer Vernooij
Update imports. |
35 |
from ..bzr import ( |
36 |
groupcompress, |
|
37 |
knit as _mod_knit, |
|
38 |
)
|
|
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
39 |
from ..errors import ( |
|
6670.4.1
by Jelmer Vernooij
Update imports. |
40 |
RevisionNotPresent, |
41 |
RevisionAlreadyPresent, |
|
42 |
)
|
|
43 |
from ..bzr.knit import ( |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
44 |
cleanup_pack_knit, |
45 |
make_file_factory, |
|
46 |
make_pack_factory, |
|
|
2770.1.1
by Aaron Bentley
Initial implmentation of plain knit annotation |
47 |
)
|
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
48 |
from ..sixish import ( |
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
49 |
BytesIO, |
|
6631.2.3
by Martin
Fix per_versionedfile test failures and rethink future_builtins |
50 |
zip, |
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
51 |
)
|
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
52 |
from . import ( |
|
3518.1.1
by Jelmer Vernooij
Add VirtualVersionedFiles class. |
53 |
TestCase, |
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
54 |
TestCaseWithMemoryTransport, |
|
4009.3.2
by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later. |
55 |
TestNotApplicable, |
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
56 |
TestSkipped, |
57 |
)
|
|
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
58 |
from .http_utils import TestCaseWithWebserver |
59 |
from ..transport.memory import MemoryTransport |
|
|
6670.4.1
by Jelmer Vernooij
Update imports. |
60 |
from ..bzr import versionedfile as versionedfile |
61 |
from ..bzr.versionedfile import ( |
|
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
62 |
ConstantMapper, |
63 |
HashEscapedPrefixMapper, |
|
64 |
PrefixMapper, |
|
|
3518.1.1
by Jelmer Vernooij
Add VirtualVersionedFiles class. |
65 |
VirtualVersionedFiles, |
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
66 |
make_versioned_files_factory, |
67 |
)
|
|
|
6670.4.1
by Jelmer Vernooij
Update imports. |
68 |
from ..bzr.weave import WeaveFile |
69 |
from ..bzr.weavefile import write_weave |
|
|
6624
by Jelmer Vernooij
Merge Python3 porting work ('py3 pokes') |
70 |
from .scenarios import load_tests_apply_scenarios |
|
5559.2.2
by Martin Pool
Change to using standard load_tests_apply_scenarios. |
71 |
|
72 |
||
73 |
# Hook recognised by the test loader: parameterise every test in this
# module by the scenarios each test class declares.
load_tests = load_tests_apply_scenarios
|
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
74 |
|
75 |
||
|
3350.3.11
by Robert Collins
Test inserting a stream that overlaps the current content of a knit does not error. |
76 |
def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Populate *f* with a five-node diamond ancestry graph.

    The graph is origin -> base -> (left, right) -> merged, which
    exercises both delta chains and merge handling.

    :param f: The versioned file to insert texts into.
    :param trailing_eol: If True, end the last line of each text with \\n.
    :param left_only: If True, insert only origin/base/left and omit the
        right and merged nodes.
    :return: A tuple of (f, parents) where parents maps every version id
        in the full graph to its tuple of parent-key tuples.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # The final line of every text either carries or omits its newline,
    # depending on trailing_eol.
    last_char = '\n' if trailing_eol else ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'], ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
                    ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
102 |
||
103 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
104 |
def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys
    two graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \\n.
    :param key_length: The length of keys in files. Currently supports
        length 1 and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    prefixes = [()] if key_length == 1 else [('FileA',), ('FileB',)]
    last_char = '\n' if trailing_eol else ''
    result = []

    def get_parents(suffix_list):
        # NOTE: reads the enclosing loop variable 'prefix' at call time
        # (late binding), so each call sees the current prefix.
        if nograph:
            return ()
        return [prefix + suffix for suffix in suffix_list]

    def get_key(suffix):
        return (None,) if nokeys else (suffix,)

    # Loop over prefixes once per key so the inserts are spread across
    # prefixes, which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(
            prefix + get_key('origin'), (), ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(
            prefix + get_key('base'), get_parents([('origin',)]),
            ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(
            prefix + get_key('left'), get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(
                prefix + get_key('right'), get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(
                prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
|
167 |
||
168 |
||
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
169 |
class VersionedFileTestMixIn(object): |
170 |
"""A mixin test class for testing VersionedFiles. |
|
171 |
||
172 |
This is not an adaptor-style test at this point because
|
|
173 |
theres no dynamic substitution of versioned file implementations,
|
|
174 |
they are strictly controlled by their owning repositories.
|
|
175 |
"""
|
|
176 |
||
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
177 |
def get_transaction(self):
    """Return this test's transaction, lazily defaulting it to None."""
    # getattr with a default covers the first call, before any test has
    # assigned _transaction; later calls see whatever was stored.
    self._transaction = getattr(self, '_transaction', None)
    return self._transaction
|
181 |
||
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
182 |
def test_add(self):
    """Texts added with add_lines are retrievable and guarded."""
    f = self.get_file()
    f.add_lines('r0', [], ['a\n', 'b\n'])
    f.add_lines('r1', ['r0'], ['b\n', 'c\n'])

    def check_contents(vf):
        # Both versions must be present with their exact texts.
        versions = vf.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEqual(vf.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEqual(vf.get_text('r0'), 'a\nb\n')
        self.assertEqual(vf.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(vf))
        self.assertEqual(2, vf.num_versions())
        # Unknown parents are rejected, as are duplicate version ids.
        self.assertRaises(RevisionNotPresent,
                          vf.add_lines, 'r2', ['foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
                          vf.add_lines, 'r1', [], [])

    check_contents(f)
    # this checks that reopen with create=True does not break anything.
    f = self.reopen_file(create=True)
    check_contents(f)
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
204 |
|
|
1596.2.32
by Robert Collins
Reduce re-extraction of texts during weave to knit joins by providing a memoisation facility. |
205 |
def test_adds_with_parent_texts(self):
    """add_lines accepts and populates a parent_texts memo dict."""
    f = self.get_file()
    parent_texts = {}
    _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
    try:
        _, _, parent_texts['r1'] = f.add_lines_with_ghosts(
            'r1', ['r0', 'ghost'], ['b\n', 'c\n'],
            parent_texts=parent_texts)
    except NotImplementedError:
        # The format doesn't support ghosts; add without the ghost parent.
        _, _, parent_texts['r1'] = f.add_lines(
            'r1', ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
    f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
    self.assertNotEqual(None, parent_texts['r0'])
    self.assertNotEqual(None, parent_texts['r1'])

    def check_file(vf):
        # All three versions are present with their texts intact.
        versions = vf.versions()
        for version in ('r0', 'r1', 'r2'):
            self.assertTrue(version in versions)
        self.assertEqual(vf.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEqual(vf.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(vf.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, vf.num_versions())
        # Annotations attribute each line to the version introducing it.
        origins = vf.annotate('r1')
        self.assertEqual(origins[0][0], 'r0')
        self.assertEqual(origins[1][0], 'r1')
        origins = vf.annotate('r2')
        self.assertEqual(origins[0][0], 'r1')
        self.assertEqual(origins[1][0], 'r2')

    check_file(f)
    f = self.reopen_file()
    check_file(f)
|
238 |
||
|
2805.6.7
by Robert Collins
Review feedback. |
239 |
def test_add_unicode_content(self):
    """Unicode text is rejected: versioned files store byte sequences."""
    vf = self.get_file()
    self.assertRaises(
        errors.BzrBadParameterUnicode,
        vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
    # The ghost-aware variant may be unimplemented; either exception is
    # acceptable there.
    self.assertRaises(
        (errors.BzrBadParameterUnicode, NotImplementedError),
        vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
|
248 |
||
|
2520.4.150
by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines |
249 |
def test_add_follows_left_matching_blocks(self):
    """Changing left_matching_blocks changes the stored delta.

    Note: There are multiple correct deltas in this case, because
    we start with 1 "a" and we get 3.
    """
    vf = self.get_file()
    if isinstance(vf, WeaveFile):
        raise TestSkipped("WeaveFile ignores left_matching_blocks")
    expected = ['a\n', 'a\n', 'a\n']
    vf.add_lines('1', [], ['a\n'])
    # Two different (both valid) matching-block hints must each
    # reconstruct to the same full text.
    vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                 left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
    self.assertEqual(expected, vf.get_lines('2'))
    vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                 left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
    self.assertEqual(expected, vf.get_lines('3'))
|
2520.4.150
by Aaron Bentley
Test that non-Weave uses left_matching_blocks for add_lines |
265 |
|
|
2805.6.7
by Robert Collins
Review feedback. |
266 |
def test_inline_newline_throws(self):
    """Embedded newlines inside a line are rejected; bare CRs are not."""
    vf = self.get_file()
    # An extra '\n' inside a supposed single line must be refused.
    self.assertRaises(
        errors.BzrBadParameterContainsNewline,
        vf.add_lines, 'a', [], ['a\n\n'])
    self.assertRaises(
        (errors.BzrBadParameterContainsNewline, NotImplementedError),
        vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
    # but inline CR's are allowed
    vf.add_lines('a', [], ['a\r\n'])
    try:
        vf.add_lines_with_ghosts('b', [], ['a\r\n'])
    except NotImplementedError:
        # Formats without ghost support simply skip the second insert.
        pass
|
|
280 |
||
|
2229.2.1
by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository |
281 |
def test_add_reserved(self):
    """Adding a version under a reserved id (trailing ':') must fail."""
    vf = self.get_file()
    self.assertRaises(
        errors.ReservedId,
        vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
|
285 |
||
|
2794.1.1
by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit. |
286 |
def test_add_lines_nostoresha(self):
    """When nostore_sha is supplied using old content raises."""
    vf = self.get_file()
    samples = [
        ('a', []),
        ('b', ["foo\n", "bar\n"]),
        ('c', ["foo\n", "bar"]),
        ]
    shas = []
    for version, lines in samples:
        sha, _, _ = vf.add_lines(version, [], lines)
        shas.append(sha)
    # Every sample's content is now stored, so re-adding any of it with
    # its own sha as nostore_sha must refuse...
    for sha, (version, lines) in zip(shas, samples):
        self.assertRaises(errors.ExistingContent,
                          vf.add_lines, version + "2", [], lines,
                          nostore_sha=sha)
        # ...and no new version should have been added.
        self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                          version + "2")
|
305 |
||
|
2803.1.1
by Robert Collins
Fix typo in ghosts version of test_add_lines_nostoresha. |
306 |
def test_add_lines_with_ghosts_nostoresha(self): |
|
2794.1.1
by Robert Collins
Allow knits to be instructed not to add a text based on a sha, for commit. |
307 |
"""When nostore_sha is supplied using old content raises.""" |
308 |
vf = self.get_file() |
|
309 |
empty_text = ('a', []) |
|
310 |
sample_text_nl = ('b', ["foo\n", "bar\n"]) |
|
311 |
sample_text_no_nl = ('c', ["foo\n", "bar"]) |
|
312 |
shas = [] |
|
313 |
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl): |
|
314 |
sha, _, _ = vf.add_lines(version, [], lines) |
|
315 |
shas.append(sha) |
|
316 |
# we now have a copy of all the lines in the vf.
|
|
317 |
# is the test applicable to this vf implementation?
|
|
318 |
try: |
|
319 |
vf.add_lines_with_ghosts('d', [], []) |
|
320 |
except NotImplementedError: |
|
321 |
raise TestSkipped("add_lines_with_ghosts is optional") |
|
322 |
for sha, (version, lines) in zip( |
|
323 |
shas, (empty_text, sample_text_nl, sample_text_no_nl)): |
|
324 |
self.assertRaises(errors.ExistingContent, |
|
325 |
vf.add_lines_with_ghosts, version + "2", [], lines, |
|
326 |
nostore_sha=sha) |
|
327 |
# and no new version should have been added.
|
|
328 |
self.assertRaises(errors.RevisionNotPresent, vf.get_lines, |
|
329 |
version + "2") |
|
330 |
||
|
2776.1.1
by Robert Collins
* The ``add_lines`` methods on ``VersionedFile`` implementations has changed |
331 |
def test_add_lines_return_value(self):
    """add_lines returns a 3-tuple starting with (sha1, text size)."""
    vf = self.get_file()
    samples = [
        ('a', []),
        ('b', ["foo\n", "bar\n"]),
        ('c', ["foo\n", "bar"]),
        ]
    for version, lines in samples:
        # The first two elements are the same for all versioned files:
        # the digest and the size of the text. Some implementations
        # return additional data in further tuple elements.
        result = vf.add_lines(version, [], lines)
        self.assertEqual(3, len(result))
        expected = (osutils.sha_strings(lines), sum(map(len, lines)))
        self.assertEqual(expected, result[0:2])
    # parents should not affect the result:
    lines = samples[1][1]
    self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                     vf.add_lines('d', ['b', 'c'], lines)[0:2])
|
350 |
||
|
2229.2.1
by Aaron Bentley
Reject reserved ids in versiondfile, tree, branch and repository |
351 |
def test_get_reserved(self):
    """Reserved version ids are rejected by every read accessor."""
    vf = self.get_file()
    self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
    for getter in (vf.get_lines, vf.get_text):
        self.assertRaises(errors.ReservedId, getter, 'b:')
|
356 |
||
|
3468.2.4
by Martin Pool
Test and fix #234748 problems in trailing newline diffs |
357 |
def test_add_unchanged_last_line_noeol_snapshot(self):
    """Add a text with an unchanged last line with no eol should work."""
    # Test adding this in a number of chain lengths; because the interface
    # for VersionedFile does not allow forcing a specific chain length, we
    # just use a small base to get the first snapshot, then a much longer
    # first line for the next add (which will make the third add snapshot)
    # and so on. 20 has been chosen as an arbitrary figure - knits use 200
    # as a capped delta length, but ideally we would have some way of
    # tuning the test to the store (e.g. keep going until a snapshot
    # happens).
    for chain_length in range(20):
        version_lines = {}
        vf = self.get_file('case-%d' % chain_length)
        parents = []
        for idx in range(chain_length):
            version = 'step-%d' % idx
            lines = ['prelude \n'] * idx + ['line']
            vf.add_lines(version, parents, lines)
            version_lines[version] = lines
            parents = [version]
        # The final text reuses the no-eol last line unchanged.
        vf.add_lines('no-eol', parents, ['line'])
        vf.get_texts(version_lines.keys())
        self.assertEqualDiff('line', vf.get_text('no-eol'))
|
381 |
||
382 |
def test_get_texts_eol_variation(self):
    """Alternating eol/no-eol texts extract cleanly in either order."""
    # similar to the failure in <http://bugs.launchpad.net/234748>
    vf = self.get_file()
    with_nl = ["line\n"]
    without_nl = ["line"]
    versions = []
    version_lines = {}
    parents = []
    for i in range(4):
        version = 'v%d' % i
        lines = with_nl if i % 2 else without_nl
        # left_matching blocks is an internal api; it operates on the
        # *internal* representation for a knit, which is with *all* lines
        # being normalised to end with \n - even the final line in a no_nl
        # file. Using it here ensures that a broken internal implementation
        # (which is what this test tests) will generate a correct line
        # delta (which is to say, an empty delta).
        vf.add_lines(version, parents, lines,
                     left_matching_blocks=[(0, 0, 1)])
        parents = [version]
        versions.append(version)
        version_lines[version] = lines
    vf.check()
    vf.get_texts(versions)
    vf.get_texts(reversed(versions))
|
410 |
||
|
3460.2.1
by Robert Collins
* Inserting a bundle which changes the contents of a file with no trailing |
411 |
def test_add_lines_with_matching_blocks_noeol_last_line(self):
    """Add a text with an unchanged last line with no eol should work.

    Creates a delta which adds a new line before the trailing no-eol
    line and reuses that last line unaltered (which can cause annotation
    reuse). Tests this in two situations: on top of a fresh insertion,
    and on top of an existing delta.
    """
    # The original carried an unused function-local
    # `from breezy import multiparent` and an unused hand-verified
    # sha1 constant ('6a1d115ec7b60afb664dc14890b5af5ce3c827a4');
    # both removed as dead code.
    # On top of a new insertion
    vf = self.get_file('fulltext')
    vf.add_lines('noeol', [], ['line'])
    vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
        left_matching_blocks=[(0, 1, 1)])
    self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
    # On top of a delta
    vf = self.get_file('delta')
    vf.add_lines('base', [], ['line'])
    vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
    vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
        left_matching_blocks=[(1, 1, 1)])
    self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
|
432 |
||
|
2520.4.85
by Aaron Bentley
Get all test passing (which just proves there aren't enough tests!) |
433 |
def test_make_mpdiffs(self):
    """Each version round-trips through make_mpdiffs/add_mpdiffs."""
    from breezy import multiparent
    vf = self.get_file('foo')
    # Populate vf with the standard delta/eol fixture texts.
    self._setup_for_deltas(vf)
    new_vf = self.get_file('bar')
    for version in multiparent.topo_iter(vf):
        mpdiff = vf.make_mpdiffs([version])[0]
        parent_keys = vf.get_parent_map([version])[version]
        expected_sha1 = vf.get_sha1s([version])[version]
        new_vf.add_mpdiffs(
            [(version, parent_keys, expected_sha1, mpdiff)])
        self.assertEqualDiff(vf.get_text(version),
                             new_vf.get_text(version))
|
444 |
||
|
3453.3.2
by John Arbash Meinel
Add a test case for the first loop, unable to find a way to trigger the second loop |
445 |
def test_make_mpdiffs_with_ghosts(self):
    """make_mpdiffs raises RevisionNotPresent for a ghost version."""
    vf = self.get_file('foo')
    try:
        vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
    except NotImplementedError:
        # old Weave formats do not allow ghosts.  Previously this
        # silently returned, reporting a pass; skip explicitly instead,
        # matching test_add_lines_with_ghosts_nostoresha.
        raise TestSkipped("add_lines_with_ghosts is optional")
    self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])
453 |
||
|
1596.2.38
by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready. |
454 |
def _setup_for_deltas(self, f): |
|
2794.1.2
by Robert Collins
Nuke versioned file add/get delta support, allowing easy simplification of unannotated Content, reducing memory copies and friction during commit on unannotated texts. |
455 |
self.assertFalse(f.has_version('base')) |
|
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
456 |
# add texts that should trip the knit maximum delta chain threshold
|
457 |
# as well as doing parallel chains of data in knits.
|
|
458 |
# this is done by two chains of 25 insertions
|
|
459 |
f.add_lines('base', [], ['line\n']) |
|
|
1596.2.38
by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready. |
460 |
f.add_lines('noeol', ['base'], ['line']) |
461 |
# detailed eol tests:
|
|
462 |
# shared last line with parent no-eol
|
|
463 |
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line']) |
|
464 |
# differing last line with parent, both no-eol
|
|
465 |
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone']) |
|
466 |
# add eol following a noneol parent, change content
|
|
467 |
f.add_lines('eol', ['noeol'], ['phone\n']) |
|
468 |
# add eol following a noneol parent, no change content
|
|
469 |
f.add_lines('eolline', ['noeol'], ['line\n']) |
|
470 |
# noeol with no parents:
|
|
471 |
f.add_lines('noeolbase', [], ['line']) |
|
472 |
# noeol preceeding its leftmost parent in the output:
|
|
473 |
# this is done by making it a merge of two parents with no common
|
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
474 |
# anestry: noeolbase and noeol with the
|
|
1596.2.38
by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready. |
475 |
# later-inserted parent the leftmost.
|
476 |
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line']) |
|
477 |
# two identical eol texts
|
|
478 |
f.add_lines('noeoldup', ['noeol'], ['line']) |
|
|
1596.2.36
by Robert Collins
add a get_delta api to versioned_file. |
479 |
next_parent = 'base' |
480 |
text_name = 'chain1-' |
|
481 |
text = ['line\n'] |
|
482 |
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079', |
|
483 |
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7', |
|
484 |
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa', |
|
485 |
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1', |
|
486 |
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6', |
|
487 |
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d', |
|
488 |
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea', |
|
489 |
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab', |
|
490 |
8 :'779e9a0b28f9f832528d4b21e17e168c67697272', |
|
491 |
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f', |
|
492 |
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05', |
|
493 |
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199', |
|
494 |
12:'31a2286267f24d8bedaa43355f8ad7129509ea85', |
|
495 |
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a', |
|
496 |
14:'2c4b1736566b8ca6051e668de68650686a3922f2', |
|
497 |
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde', |
|
498 |
16:'b0d2e18d3559a00580f6b49804c23fea500feab3', |
|
499 |
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7', |
|
500 |
18:'5cf64a3459ae28efa60239e44b20312d25b253f3', |
|
501 |
19:'1ebed371807ba5935958ad0884595126e8c4e823', |
|
502 |
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3', |
|
503 |
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d', |
|
504 |
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb', |
|
505 |
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f', |
|
506 |
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc', |
|
507 |
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855', |
|
508 |
}
|
|
509 |
for depth in range(26): |
|
510 |
new_version = text_name + '%s' % depth |
|
511 |
text = text + ['line\n'] |
|
512 |
f.add_lines(new_version, [next_parent], text) |
|
513 |
next_parent = new_version |
|
514 |
next_parent = 'base' |
|
515 |
text_name = 'chain2-' |
|
516 |
text = ['line\n'] |
|
517 |
for depth in range(26): |
|
518 |
new_version = text_name + '%s' % depth |
|
519 |
text = text + ['line\n'] |
|
520 |
f.add_lines(new_version, [next_parent], text) |
|
521 |
next_parent = new_version |
|
|
1596.2.38
by Robert Collins
rollback from using deltas to using fulltexts - deltas need more work to be ready. |
522 |
return sha1s |
|
1596.2.37
by Robert Collins
Switch to delta based content copying in the generic versioned file copier. |
523 |
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
524 |
def test_ancestry(self): |
525 |
f = self.get_file() |
|
|
1563.2.29
by Robert Collins
Remove all but fetch references to repository.revision_store. |
526 |
self.assertEqual([], f.get_ancestry([])) |
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
527 |
f.add_lines('r0', [], ['a\n', 'b\n']) |
528 |
f.add_lines('r1', ['r0'], ['b\n', 'c\n']) |
|
529 |
f.add_lines('r2', ['r0'], ['b\n', 'c\n']) |
|
530 |
f.add_lines('r3', ['r2'], ['b\n', 'c\n']) |
|
531 |
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n']) |
|
|
1563.2.29
by Robert Collins
Remove all but fetch references to repository.revision_store. |
532 |
self.assertEqual([], f.get_ancestry([])) |
|
1563.2.35
by Robert Collins
cleanup deprecation warnings and finish conversion so the inventory is knit based too. |
533 |
versions = f.get_ancestry(['rM']) |
534 |
# there are some possibilities:
|
|
535 |
# r0 r1 r2 rM r3
|
|
536 |
# r0 r1 r2 r3 rM
|
|
537 |
# etc
|
|
538 |
# so we check indexes
|
|
539 |
r0 = versions.index('r0') |
|
540 |
r1 = versions.index('r1') |
|
541 |
r2 = versions.index('r2') |
|
542 |
self.assertFalse('r3' in versions) |
|
543 |
rM = versions.index('rM') |
|
544 |
self.assertTrue(r0 < r1) |
|
545 |
self.assertTrue(r0 < r2) |
|
546 |
self.assertTrue(r1 < rM) |
|
547 |
self.assertTrue(r2 < rM) |
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
548 |
|
549 |
self.assertRaises(RevisionNotPresent, |
|
550 |
f.get_ancestry, ['rM', 'rX']) |
|
|
1594.2.21
by Robert Collins
Teach versioned files to prevent mutation after finishing. |
551 |
|
|
2530.1.1
by Aaron Bentley
Make topological sorting optional for get_ancestry |
552 |
self.assertEqual(set(f.get_ancestry('rM')), |
553 |
set(f.get_ancestry('rM', topo_sorted=False))) |
|
554 |
||
|
1594.2.21
by Robert Collins
Teach versioned files to prevent mutation after finishing. |
555 |
def test_mutate_after_finish(self): |
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
556 |
self._transaction = 'before' |
|
1594.2.21
by Robert Collins
Teach versioned files to prevent mutation after finishing. |
557 |
f = self.get_file() |
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
558 |
self._transaction = 'after' |
|
1594.2.21
by Robert Collins
Teach versioned files to prevent mutation after finishing. |
559 |
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], []) |
560 |
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], []) |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
561 |
|
|
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
562 |
def test_copy_to(self): |
563 |
f = self.get_file() |
|
564 |
f.add_lines('0', [], ['a\n']) |
|
565 |
t = MemoryTransport() |
|
566 |
f.copy_to('foo', t) |
|
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
567 |
for suffix in self.get_factory().get_suffixes(): |
|
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
568 |
self.assertTrue(t.has('foo' + suffix)) |
569 |
||
570 |
def test_get_suffixes(self): |
|
571 |
f = self.get_file() |
|
572 |
# and should be a list
|
|
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
573 |
self.assertTrue(isinstance(self.get_factory().get_suffixes(), list)) |
|
1563.2.15
by Robert Collins
remove the weavestore assumptions about the number and nature of files it manages. |
574 |
|
|
3287.5.1
by Robert Collins
Add VersionedFile.get_parent_map. |
575 |
def test_get_parent_map(self): |
576 |
f = self.get_file() |
|
577 |
f.add_lines('r0', [], ['a\n', 'b\n']) |
|
578 |
self.assertEqual( |
|
579 |
{'r0':()}, f.get_parent_map(['r0'])) |
|
580 |
f.add_lines('r1', ['r0'], ['a\n', 'b\n']) |
|
581 |
self.assertEqual( |
|
582 |
{'r1':('r0',)}, f.get_parent_map(['r1'])) |
|
583 |
self.assertEqual( |
|
584 |
{'r0':(), |
|
585 |
'r1':('r0',)}, |
|
586 |
f.get_parent_map(['r0', 'r1'])) |
|
587 |
f.add_lines('r2', [], ['a\n', 'b\n']) |
|
588 |
f.add_lines('r3', [], ['a\n', 'b\n']) |
|
589 |
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n']) |
|
590 |
self.assertEqual( |
|
591 |
{'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m'])) |
|
592 |
self.assertEqual({}, f.get_parent_map('y')) |
|
593 |
self.assertEqual( |
|
594 |
{'r0':(), |
|
595 |
'r1':('r0',)}, |
|
596 |
f.get_parent_map(['r0', 'y', 'r1'])) |
|
597 |
||
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
598 |
def test_annotate(self): |
599 |
f = self.get_file() |
|
600 |
f.add_lines('r0', [], ['a\n', 'b\n']) |
|
601 |
f.add_lines('r1', ['r0'], ['c\n', 'b\n']) |
|
602 |
origins = f.annotate('r1') |
|
|
6614.1.3
by Vincent Ladeuil
Fix assertEquals being deprecated by using assertEqual. |
603 |
self.assertEqual(origins[0][0], 'r1') |
604 |
self.assertEqual(origins[1][0], 'r0') |
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
605 |
|
606 |
self.assertRaises(RevisionNotPresent, |
|
607 |
f.annotate, 'foo') |
|
608 |
||
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
609 |
def test_detection(self): |
610 |
# Test weaves detect corruption.
|
|
611 |
#
|
|
612 |
# Weaves contain a checksum of their texts.
|
|
613 |
# When a text is extracted, this checksum should be
|
|
614 |
# verified.
|
|
615 |
||
616 |
w = self.get_file_corrupted_text() |
|
617 |
||
618 |
self.assertEqual('hello\n', w.get_text('v1')) |
|
|
5582.9.20
by Jelmer Vernooij
remove some of the weave changes. |
619 |
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2') |
620 |
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2') |
|
621 |
self.assertRaises(errors.WeaveInvalidChecksum, w.check) |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
622 |
|
623 |
w = self.get_file_corrupted_checksum() |
|
624 |
||
625 |
self.assertEqual('hello\n', w.get_text('v1')) |
|
|
5582.9.20
by Jelmer Vernooij
remove some of the weave changes. |
626 |
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2') |
627 |
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2') |
|
628 |
self.assertRaises(errors.WeaveInvalidChecksum, w.check) |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
629 |
|
630 |
def get_file_corrupted_text(self): |
|
631 |
"""Return a versioned file with corrupt text but valid metadata.""" |
|
632 |
raise NotImplementedError(self.get_file_corrupted_text) |
|
633 |
||
|
1563.2.9
by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation. |
634 |
def reopen_file(self, name='foo'): |
635 |
"""Open the versioned file from disk again.""" |
|
636 |
raise NotImplementedError(self.reopen_file) |
|
637 |
||
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
638 |
def test_iter_lines_added_or_present_in_versions(self): |
639 |
# test that we get at least an equalset of the lines added by
|
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
640 |
# versions in the weave
|
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
641 |
# the ordering here is to make a tree so that dumb searches have
|
642 |
# more changes to muck up.
|
|
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
643 |
|
|
4961.2.9
by Martin Pool
Rip out most remaining uses of DummyProgressBar |
644 |
class InstrumentedProgress(progress.ProgressTask): |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
645 |
|
646 |
def __init__(self): |
|
|
4961.2.9
by Martin Pool
Rip out most remaining uses of DummyProgressBar |
647 |
progress.ProgressTask.__init__(self) |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
648 |
self.updates = [] |
649 |
||
650 |
def update(self, msg=None, current=None, total=None): |
|
651 |
self.updates.append((msg, current, total)) |
|
652 |
||
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
653 |
vf = self.get_file() |
654 |
# add a base to get included
|
|
655 |
vf.add_lines('base', [], ['base\n']) |
|
656 |
# add a ancestor to be included on one side
|
|
657 |
vf.add_lines('lancestor', [], ['lancestor\n']) |
|
658 |
# add a ancestor to be included on the other side
|
|
659 |
vf.add_lines('rancestor', ['base'], ['rancestor\n']) |
|
660 |
# add a child of rancestor with no eofile-nl
|
|
661 |
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n']) |
|
662 |
# add a child of lancestor and base to join the two roots
|
|
663 |
vf.add_lines('otherchild', |
|
664 |
['lancestor', 'base'], |
|
665 |
['base\n', 'lancestor\n', 'otherchild\n']) |
|
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
666 |
def iter_with_versions(versions, expected): |
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
667 |
# now we need to see what lines are returned, and how often.
|
|
2975.3.1
by Robert Collins
Change (without backwards compatibility) the |
668 |
lines = {} |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
669 |
progress = InstrumentedProgress() |
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
670 |
# iterate over the lines
|
|
2975.3.1
by Robert Collins
Change (without backwards compatibility) the |
671 |
for line in vf.iter_lines_added_or_present_in_versions(versions, |
|
2039.1.1
by Aaron Bentley
Clean up progress properly when interrupted during fetch (#54000) |
672 |
pb=progress): |
|
2975.3.1
by Robert Collins
Change (without backwards compatibility) the |
673 |
lines.setdefault(line, 0) |
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
674 |
lines[line] += 1 |
|
2975.3.1
by Robert Collins
Change (without backwards compatibility) the |
675 |
if []!= progress.updates: |
|
2039.1.2
by Aaron Bentley
Tweak test to avoid catching assert |
676 |
self.assertEqual(expected, progress.updates) |
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
677 |
return lines |
|
2147.1.3
by John Arbash Meinel
In knit.py we were re-using a variable in 2 loops, causing bogus progress messages to be generated. |
678 |
lines = iter_with_versions(['child', 'otherchild'], |
|
4103.3.4
by Martin Pool
Update test that depends on progress bar strings |
679 |
[('Walking content', 0, 2), |
680 |
('Walking content', 1, 2), |
|
681 |
('Walking content', 2, 2)]) |
|
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
682 |
# we must see child and otherchild
|
|
2975.3.1
by Robert Collins
Change (without backwards compatibility) the |
683 |
self.assertTrue(lines[('child\n', 'child')] > 0) |
684 |
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0) |
|
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
685 |
# we dont care if we got more than that.
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
686 |
|
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
687 |
# test all lines
|
|
4103.3.4
by Martin Pool
Update test that depends on progress bar strings |
688 |
lines = iter_with_versions(None, [('Walking content', 0, 5), |
689 |
('Walking content', 1, 5), |
|
690 |
('Walking content', 2, 5), |
|
691 |
('Walking content', 3, 5), |
|
692 |
('Walking content', 4, 5), |
|
693 |
('Walking content', 5, 5)]) |
|
|
1594.2.6
by Robert Collins
Introduce a api specifically for looking at lines in some versions of the inventory, for fileid_involved. |
694 |
# all lines must be seen at least once
|
|
2975.3.1
by Robert Collins
Change (without backwards compatibility) the |
695 |
self.assertTrue(lines[('base\n', 'base')] > 0) |
696 |
self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0) |
|
697 |
self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0) |
|
698 |
self.assertTrue(lines[('child\n', 'child')] > 0) |
|
699 |
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0) |
|
|
1594.2.7
by Robert Collins
Add versionedfile.fix_parents api for correcting data post hoc. |
700 |
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
701 |
def test_add_lines_with_ghosts(self): |
702 |
# some versioned file formats allow lines to be added with parent
|
|
703 |
# information that is > than that in the format. Formats that do
|
|
704 |
# not support this need to raise NotImplementedError on the
|
|
705 |
# add_lines_with_ghosts api.
|
|
706 |
vf = self.get_file() |
|
707 |
# add a revision with ghost parents
|
|
|
2249.5.12
by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8 |
708 |
# The preferred form is utf8, but we should translate when needed
|
709 |
parent_id_unicode = u'b\xbfse' |
|
710 |
parent_id_utf8 = parent_id_unicode.encode('utf8') |
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
711 |
try: |
|
2309.4.7
by John Arbash Meinel
Update VersionedFile tests to ensure that they can take Unicode, |
712 |
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], []) |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
713 |
except NotImplementedError: |
714 |
# check the other ghost apis are also not implemented
|
|
715 |
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo']) |
|
716 |
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo') |
|
717 |
return
|
|
|
2150.2.1
by Robert Collins
Correctly decode utf8 revision ids from knits when parsing, fixes a regression where a unicode revision id is stored correctly, but then indexed by the utf8 value on the next invocation of bzr, rather than the unicode value. |
718 |
vf = self.reopen_file() |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
719 |
# test key graph related apis: getncestry, _graph, get_parents
|
720 |
# has_version
|
|
721 |
# - these are ghost unaware and must not be reflect ghosts
|
|
|
2249.5.12
by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8 |
722 |
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse')) |
723 |
self.assertFalse(vf.has_version(parent_id_utf8)) |
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
724 |
# we have _with_ghost apis to give us ghost information.
|
|
2249.5.12
by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8 |
725 |
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse'])) |
726 |
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse')) |
|
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
727 |
# if we add something that is a ghost of another, it should correct the
|
728 |
# results of the prior apis
|
|
|
2858.2.1
by Martin Pool
Remove most calls to safe_file_id and safe_revision_id. |
729 |
vf.add_lines(parent_id_utf8, [], []) |
|
2249.5.12
by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8 |
730 |
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse'])) |
|
3287.5.2
by Robert Collins
Deprecate VersionedFile.get_parents, breaking pulling from a ghost containing knit or pack repository to weaves, which improves correctness and allows simplification of core code. |
731 |
self.assertEqual({'notbxbfse':(parent_id_utf8,)}, |
732 |
vf.get_parent_map(['notbxbfse'])) |
|
|
2249.5.12
by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8 |
733 |
self.assertTrue(vf.has_version(parent_id_utf8)) |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
734 |
# we have _with_ghost apis to give us ghost information.
|
|
2858.2.1
by Martin Pool
Remove most calls to safe_file_id and safe_revision_id. |
735 |
self.assertEqual([parent_id_utf8, 'notbxbfse'], |
736 |
vf.get_ancestry_with_ghosts(['notbxbfse'])) |
|
|
2249.5.12
by John Arbash Meinel
Change the APIs for VersionedFile, Store, and some of Repository into utf-8 |
737 |
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse')) |
|
1594.2.8
by Robert Collins
add ghost aware apis to knits. |
738 |
|
|
1594.2.9
by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all. |
739 |
def test_add_lines_with_ghosts_after_normal_revs(self): |
740 |
# some versioned file formats allow lines to be added with parent
|
|
741 |
# information that is > than that in the format. Formats that do
|
|
742 |
# not support this need to raise NotImplementedError on the
|
|
743 |
# add_lines_with_ghosts api.
|
|
744 |
vf = self.get_file() |
|
745 |
# probe for ghost support
|
|
746 |
try: |
|
|
3287.6.5
by Robert Collins
Deprecate VersionedFile.has_ghost. |
747 |
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n']) |
|
1594.2.9
by Robert Collins
Teach Knit repositories how to handle ghosts without corrupting at all. |
748 |
except NotImplementedError: |
749 |
return
|
|
750 |
vf.add_lines_with_ghosts('references_ghost', |
|
751 |
['base', 'a_ghost'], |
|
752 |
['line\n', 'line_b\n', 'line_c\n']) |
|
753 |
origins = vf.annotate('references_ghost') |
|
|
6614.1.3
by Vincent Ladeuil
Fix assertEquals being deprecated by using assertEqual. |
754 |
self.assertEqual(('base', 'line\n'), origins[0]) |
755 |
self.assertEqual(('base', 'line_b\n'), origins[1]) |
|
756 |
self.assertEqual(('references_ghost', 'line_c\n'), origins[2]) |
|
|
1594.2.23
by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files. |
757 |
|
758 |
def test_readonly_mode(self): |
|
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
759 |
t = self.get_transport() |
|
1594.2.23
by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files. |
760 |
factory = self.get_factory() |
|
6619.3.14
by Jelmer Vernooij
Convert some octal numbers to new notations. |
761 |
vf = factory('id', t, 0o777, create=True, access_mode='w') |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
762 |
vf = factory('id', t, access_mode='r') |
|
1594.2.23
by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files. |
763 |
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], []) |
764 |
self.assertRaises(errors.ReadOnlyError, |
|
765 |
vf.add_lines_with_ghosts, |
|
766 |
'base', |
|
767 |
[],
|
|
768 |
[])
|
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
769 |
|
|
3316.2.9
by Robert Collins
* ``VersionedFile.get_sha1`` is deprecated, please use |
770 |
def test_get_sha1s(self): |
|
1666.1.6
by Robert Collins
Make knit the default format. |
771 |
# check the sha1 data is available
|
772 |
vf = self.get_file() |
|
773 |
# a simple file
|
|
774 |
vf.add_lines('a', [], ['a\n']) |
|
775 |
# the same file, different metadata
|
|
776 |
vf.add_lines('b', ['a'], ['a\n']) |
|
777 |
# a file differing only in last newline.
|
|
778 |
vf.add_lines('c', [], ['a']) |
|
|
3350.8.3
by Robert Collins
VF.get_sha1s needed changing to be stackable. |
779 |
self.assertEqual({ |
780 |
'a': '3f786850e387550fdab836ed7e6dc881de23001b', |
|
781 |
'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', |
|
782 |
'b': '3f786850e387550fdab836ed7e6dc881de23001b', |
|
783 |
},
|
|
784 |
vf.get_sha1s(['a', 'c', 'b'])) |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
785 |
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
786 |
|
|
2535.3.1
by Andrew Bennetts
Add get_format_signature to VersionedFile |
787 |
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn): |
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
788 |
|
789 |
def get_file(self, name='foo'): |
|
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
790 |
return WeaveFile(name, self.get_transport(), |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
791 |
create=True, |
792 |
get_scope=self.get_transaction) |
|
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
793 |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
794 |
def get_file_corrupted_text(self): |
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
795 |
w = WeaveFile('foo', self.get_transport(), |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
796 |
create=True, |
797 |
get_scope=self.get_transaction) |
|
|
1563.2.13
by Robert Collins
InterVersionedFile implemented. |
798 |
w.add_lines('v1', [], ['hello\n']) |
799 |
w.add_lines('v2', ['v1'], ['hello\n', 'there\n']) |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
800 |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
801 |
# We are going to invasively corrupt the text
|
802 |
# Make sure the internals of weave are the same
|
|
803 |
self.assertEqual([('{', 0) |
|
804 |
, 'hello\n' |
|
805 |
, ('}', None) |
|
806 |
, ('{', 1) |
|
807 |
, 'there\n' |
|
808 |
, ('}', None) |
|
809 |
], w._weave) |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
810 |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
811 |
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f' |
812 |
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef' |
|
813 |
], w._sha1s) |
|
814 |
w.check() |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
815 |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
816 |
# Corrupted
|
817 |
w._weave[4] = 'There\n' |
|
818 |
return w |
|
819 |
||
820 |
def get_file_corrupted_checksum(self): |
|
821 |
w = self.get_file_corrupted_text() |
|
822 |
# Corrected
|
|
823 |
w._weave[4] = 'there\n' |
|
824 |
self.assertEqual('hello\nthere\n', w.get_text('v2')) |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
825 |
|
|
1563.2.6
by Robert Collins
Start check tests for knits (pending), and remove dead code. |
826 |
#Invalid checksum, first digit changed
|
827 |
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef' |
|
828 |
return w |
|
829 |
||
|
1666.1.6
by Robert Collins
Make knit the default format. |
830 |
def reopen_file(self, name='foo', create=False): |
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
831 |
return WeaveFile(name, self.get_transport(), |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
832 |
create=create, |
833 |
get_scope=self.get_transaction) |
|
|
1563.2.9
by Robert Collins
Update versionedfile api tests to ensure that data is available after every operation. |
834 |
|
|
1563.2.25
by Robert Collins
Merge in upstream. |
835 |
def test_no_implicit_create(self): |
836 |
self.assertRaises(errors.NoSuchFile, |
|
837 |
WeaveFile, |
|
838 |
'foo', |
|
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
839 |
self.get_transport(), |
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
840 |
get_scope=self.get_transaction) |
|
1563.2.25
by Robert Collins
Merge in upstream. |
841 |
|
|
1594.2.23
by Robert Collins
Test versioned file storage handling of clean/dirty status for accessed versioned files. |
842 |
def get_factory(self): |
843 |
return WeaveFile |
|
844 |
||
|
1563.2.1
by Robert Collins
Merge in a variation of the versionedfile api from versioned-file. |
845 |
|
|
3062.1.9
by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile |
846 |
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport): |
847 |
||
848 |
def setUp(self): |
|
|
6552.1.4
by Vincent Ladeuil
Remaining tests matching setup(self) that can be rewritten with super(). |
849 |
super(TestPlanMergeVersionedFile, self).setUp() |
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
850 |
mapper = PrefixMapper() |
851 |
factory = make_file_factory(True, mapper) |
|
852 |
self.vf1 = factory(self.get_transport('root-1')) |
|
853 |
self.vf2 = factory(self.get_transport('root-2')) |
|
854 |
self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root') |
|
855 |
self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2]) |
|
|
3062.1.9
by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile |
856 |
|
857 |
def test_add_lines(self): |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
858 |
self.plan_merge_vf.add_lines(('root', 'a:'), [], []) |
859 |
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, |
|
860 |
('root', 'a'), [], []) |
|
861 |
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, |
|
862 |
('root', 'a:'), None, []) |
|
863 |
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, |
|
864 |
('root', 'a:'), [], None) |
|
|
3062.1.9
by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile |
865 |
|
866 |
def setup_abcde(self): |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
867 |
self.vf1.add_lines(('root', 'A'), [], ['a']) |
868 |
self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b']) |
|
869 |
self.vf2.add_lines(('root', 'C'), [], ['c']) |
|
870 |
self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d']) |
|
871 |
self.plan_merge_vf.add_lines(('root', 'E:'), |
|
872 |
[('root', 'B'), ('root', 'D')], ['e']) |
|
|
3062.1.9
by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile |
873 |
|
874 |
def test_get_parents(self): |
|
875 |
self.setup_abcde() |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
876 |
self.assertEqual({('root', 'B'):(('root', 'A'),)}, |
877 |
self.plan_merge_vf.get_parent_map([('root', 'B')])) |
|
878 |
self.assertEqual({('root', 'D'):(('root', 'C'),)}, |
|
879 |
self.plan_merge_vf.get_parent_map([('root', 'D')])) |
|
880 |
self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))}, |
|
881 |
self.plan_merge_vf.get_parent_map([('root', 'E:')])) |
|
882 |
self.assertEqual({}, |
|
883 |
self.plan_merge_vf.get_parent_map([('root', 'F')])) |
|
|
3287.5.2
by Robert Collins
Deprecate VersionedFile.get_parents, breaking pulling from a ghost containing knit or pack repository to weaves, which improves correctness and allows simplification of core code. |
884 |
self.assertEqual({ |
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
885 |
('root', 'B'):(('root', 'A'),), |
886 |
('root', 'D'):(('root', 'C'),), |
|
887 |
('root', 'E:'):(('root', 'B'),('root', 'D')), |
|
888 |
},
|
|
889 |
self.plan_merge_vf.get_parent_map( |
|
890 |
[('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')])) |
|
|
3062.1.9
by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile |
891 |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
892 |
def test_get_record_stream(self): |
|
3062.1.9
by Aaron Bentley
Move PlanMerge into merge and _PlanMergeVersionedFile into versionedfile |
893 |
self.setup_abcde() |
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
894 |
def get_record(suffix): |
|
6634.2.1
by Martin
Apply 2to3 next fixer and make compatible |
895 |
return next(self.plan_merge_vf.get_record_stream( |
896 |
[('root', suffix)], 'unordered', True)) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
897 |
self.assertEqual('a', get_record('A').get_bytes_as('fulltext')) |
898 |
self.assertEqual('c', get_record('C').get_bytes_as('fulltext')) |
|
899 |
self.assertEqual('e', get_record('E:').get_bytes_as('fulltext')) |
|
900 |
self.assertEqual('absent', get_record('F').storage_kind) |
|
|
1666.1.1
by Robert Collins
Add trivial http-using test for versioned files. |
901 |
|
902 |
||
903 |
class TestReadonlyHttpMixin(object): |
|
904 |
||
|
3316.2.3
by Robert Collins
Remove manual notification of transaction finishing on versioned files. |
905 |
def get_transaction(self): |
906 |
return 1 |
|
907 |
||
|
1666.1.1
by Robert Collins
Add trivial http-using test for versioned files. |
908 |
def test_readonly_http_works(self): |
909 |
# we should be able to read from http with a versioned file.
|
|
910 |
vf = self.get_file() |
|
|
1666.1.6
by Robert Collins
Make knit the default format. |
911 |
# try an empty file access
|
|
6083.1.1
by Jelmer Vernooij
Use get_transport_from_{url,path} in more places. |
912 |
readonly_vf = self.get_factory()('foo', |
913 |
transport.get_transport_from_url(self.get_readonly_url('.'))) |
|
|
1666.1.6
by Robert Collins
Make knit the default format. |
914 |
self.assertEqual([], readonly_vf.versions()) |
|
5229.1.13
by Vincent Ladeuil
Eager tests are bad, we really want two tests here. |
915 |
|
916 |
def test_readonly_http_works_with_feeling(self): |
|
917 |
# we should be able to read from http with a versioned file.
|
|
918 |
vf = self.get_file() |
|
|
1666.1.6
by Robert Collins
Make knit the default format. |
919 |
# now with feeling.
|
|
1666.1.1
by Robert Collins
Add trivial http-using test for versioned files. |
920 |
vf.add_lines('1', [], ['a\n']) |
921 |
vf.add_lines('2', ['1'], ['b\n', 'a\n']) |
|
|
6083.1.1
by Jelmer Vernooij
Use get_transport_from_{url,path} in more places. |
922 |
readonly_vf = self.get_factory()('foo', |
923 |
transport.get_transport_from_url(self.get_readonly_url('.'))) |
|
|
1666.1.6
by Robert Collins
Make knit the default format. |
924 |
self.assertEqual(['1', '2'], vf.versions()) |
|
5229.1.14
by Vincent Ladeuil
Fixed as per jam's review :) |
925 |
self.assertEqual(['1', '2'], readonly_vf.versions()) |
|
1666.1.1
by Robert Collins
Add trivial http-using test for versioned files. |
926 |
for version in readonly_vf.versions(): |
927 |
readonly_vf.get_lines(version) |
|
928 |
||
929 |
||
930 |
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin): |
|
931 |
||
932 |
def get_file(self): |
|
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
933 |
return WeaveFile('foo', self.get_transport(), |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
934 |
create=True, |
935 |
get_scope=self.get_transaction) |
|
|
1666.1.1
by Robert Collins
Add trivial http-using test for versioned files. |
936 |
|
937 |
def get_factory(self): |
|
938 |
return WeaveFile |
|
939 |
||
940 |
||
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
941 |
class MergeCasesMixin(object): |
942 |
||
943 |
def doMerge(self, base, a, b, mp): |
|
944 |
from textwrap import dedent |
|
945 |
||
946 |
def addcrlf(x): |
|
947 |
return x + '\n' |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
948 |
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
949 |
w = self.get_file() |
|
6631.3.1
by Martin
Run 2to3 map fixer and refactor after |
950 |
w.add_lines('text0', [], list(map(addcrlf, base))) |
951 |
w.add_lines('text1', ['text0'], list(map(addcrlf, a))) |
|
952 |
w.add_lines('text2', ['text0'], list(map(addcrlf, b))) |
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
953 |
|
954 |
self.log_contents(w) |
|
955 |
||
956 |
self.log('merge plan:') |
|
957 |
p = list(w.plan_merge('text1', 'text2')) |
|
958 |
for state, line in p: |
|
959 |
if line: |
|
960 |
self.log('%12s | %s' % (state, line[:-1])) |
|
961 |
||
962 |
self.log('merge:') |
|
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
963 |
mt = BytesIO() |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
964 |
mt.writelines(w.weave_merge(p)) |
965 |
mt.seek(0) |
|
966 |
self.log(mt.getvalue()) |
|
967 |
||
|
6631.3.1
by Martin
Run 2to3 map fixer and refactor after |
968 |
mp = list(map(addcrlf, mp)) |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
969 |
self.assertEqual(mt.readlines(), mp) |
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
970 |
|
971 |
||
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
972 |
def testOneInsert(self): |
973 |
self.doMerge([], |
|
974 |
['aa'], |
|
975 |
[],
|
|
976 |
['aa']) |
|
977 |
||
978 |
def testSeparateInserts(self): |
|
979 |
self.doMerge(['aaa', 'bbb', 'ccc'], |
|
980 |
['aaa', 'xxx', 'bbb', 'ccc'], |
|
981 |
['aaa', 'bbb', 'yyy', 'ccc'], |
|
982 |
['aaa', 'xxx', 'bbb', 'yyy', 'ccc']) |
|
983 |
||
984 |
def testSameInsert(self): |
|
985 |
self.doMerge(['aaa', 'bbb', 'ccc'], |
|
986 |
['aaa', 'xxx', 'bbb', 'ccc'], |
|
987 |
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'], |
|
988 |
['aaa', 'xxx', 'bbb', 'yyy', 'ccc']) |
|
989 |
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb'] |
|
990 |
def testOverlappedInsert(self): |
|
991 |
self.doMerge(['aaa', 'bbb'], |
|
992 |
['aaa', 'xxx', 'yyy', 'bbb'], |
|
993 |
['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected) |
|
994 |
||
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
995 |
# really it ought to reduce this to
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
996 |
# ['aaa', 'xxx', 'yyy', 'bbb']
|
997 |
||
998 |
||
999 |
def testClashReplace(self): |
|
1000 |
self.doMerge(['aaa'], |
|
1001 |
['xxx'], |
|
1002 |
['yyy', 'zzz'], |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1003 |
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz', |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1004 |
'>>>>>>> ']) |
1005 |
||
1006 |
def testNonClashInsert1(self): |
|
1007 |
self.doMerge(['aaa'], |
|
1008 |
['xxx', 'aaa'], |
|
1009 |
['yyy', 'zzz'], |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1010 |
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz', |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1011 |
'>>>>>>> ']) |
1012 |
||
1013 |
def testNonClashInsert2(self): |
|
1014 |
self.doMerge(['aaa'], |
|
1015 |
['aaa'], |
|
1016 |
['yyy', 'zzz'], |
|
1017 |
['yyy', 'zzz']) |
|
1018 |
||
1019 |
||
1020 |
def testDeleteAndModify(self): |
|
1021 |
"""Clashing delete and modification. |
|
1022 |
||
1023 |
If one side modifies a region and the other deletes it then
|
|
1024 |
there should be a conflict with one side blank.
|
|
1025 |
"""
|
|
1026 |
||
1027 |
#######################################
|
|
1028 |
# skippd, not working yet
|
|
1029 |
return
|
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1030 |
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1031 |
self.doMerge(['aaa', 'bbb', 'ccc'], |
1032 |
['aaa', 'ddd', 'ccc'], |
|
1033 |
['aaa', 'ccc'], |
|
1034 |
['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc']) |
|
1035 |
||
1036 |
def _test_merge_from_strings(self, base, a, b, expected): |
|
1037 |
w = self.get_file() |
|
1038 |
w.add_lines('text0', [], base.splitlines(True)) |
|
1039 |
w.add_lines('text1', ['text0'], a.splitlines(True)) |
|
1040 |
w.add_lines('text2', ['text0'], b.splitlines(True)) |
|
1041 |
self.log('merge plan:') |
|
1042 |
p = list(w.plan_merge('text1', 'text2')) |
|
1043 |
for state, line in p: |
|
1044 |
if line: |
|
1045 |
self.log('%12s | %s' % (state, line[:-1])) |
|
1046 |
self.log('merge result:') |
|
1047 |
result_text = ''.join(w.weave_merge(p)) |
|
1048 |
self.log(result_text) |
|
1049 |
self.assertEqualDiff(result_text, expected) |
|
1050 |
||
1051 |
def test_weave_merge_conflicts(self): |
|
1052 |
# does weave merge properly handle plans that end with unchanged?
|
|
1053 |
result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')])) |
|
1054 |
self.assertEqual(result, 'hello\n') |
|
1055 |
||
1056 |
def test_deletion_extended(self): |
|
1057 |
"""One side deletes, the other deletes more. |
|
1058 |
"""
|
|
1059 |
base = """\ |
|
1060 |
line 1
|
|
1061 |
line 2
|
|
1062 |
line 3
|
|
1063 |
"""
|
|
1064 |
a = """\ |
|
1065 |
line 1
|
|
1066 |
line 2
|
|
1067 |
"""
|
|
1068 |
b = """\ |
|
1069 |
line 1
|
|
1070 |
"""
|
|
1071 |
result = """\ |
|
1072 |
line 1
|
|
|
4312.1.3
by John Arbash Meinel
Different sides deleting different amounts is now a conflict. |
1073 |
<<<<<<<\x20 |
1074 |
line 2
|
|
1075 |
=======
|
|
1076 |
>>>>>>>\x20 |
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1077 |
"""
|
1078 |
self._test_merge_from_strings(base, a, b, result) |
|
1079 |
||
1080 |
def test_deletion_overlap(self): |
|
1081 |
"""Delete overlapping regions with no other conflict. |
|
1082 |
||
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1083 |
Arguably it'd be better to treat these as agreement, rather than
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1084 |
conflict, but for now conflict is safer.
|
1085 |
"""
|
|
1086 |
base = """\ |
|
1087 |
start context
|
|
1088 |
int a() {} |
|
1089 |
int b() {} |
|
1090 |
int c() {} |
|
1091 |
end context
|
|
1092 |
"""
|
|
1093 |
a = """\ |
|
1094 |
start context
|
|
1095 |
int a() {} |
|
1096 |
end context
|
|
1097 |
"""
|
|
1098 |
b = """\ |
|
1099 |
start context
|
|
1100 |
int c() {} |
|
1101 |
end context
|
|
1102 |
"""
|
|
1103 |
result = """\ |
|
1104 |
start context
|
|
|
3943.8.2
by Marius Kruger
fix tests relying on trailing whitespace by replacing it with \x20. |
1105 |
<<<<<<<\x20 |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1106 |
int a() {} |
1107 |
=======
|
|
1108 |
int c() {} |
|
|
3943.8.2
by Marius Kruger
fix tests relying on trailing whitespace by replacing it with \x20. |
1109 |
>>>>>>>\x20 |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1110 |
end context
|
1111 |
"""
|
|
1112 |
self._test_merge_from_strings(base, a, b, result) |
|
1113 |
||
1114 |
def test_agreement_deletion(self): |
|
1115 |
"""Agree to delete some lines, without conflicts.""" |
|
1116 |
base = """\ |
|
1117 |
start context
|
|
1118 |
base line 1
|
|
1119 |
base line 2
|
|
1120 |
end context
|
|
1121 |
"""
|
|
1122 |
a = """\ |
|
1123 |
start context
|
|
1124 |
base line 1
|
|
1125 |
end context
|
|
1126 |
"""
|
|
1127 |
b = """\ |
|
1128 |
start context
|
|
1129 |
base line 1
|
|
1130 |
end context
|
|
1131 |
"""
|
|
1132 |
result = """\ |
|
1133 |
start context
|
|
1134 |
base line 1
|
|
1135 |
end context
|
|
1136 |
"""
|
|
1137 |
self._test_merge_from_strings(base, a, b, result) |
|
1138 |
||
1139 |
def test_sync_on_deletion(self): |
|
1140 |
"""Specific case of merge where we can synchronize incorrectly. |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1141 |
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1142 |
A previous version of the weave merge concluded that the two versions
|
1143 |
agreed on deleting line 2, and this could be a synchronization point.
|
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1144 |
Line 1 was then considered in isolation, and thought to be deleted on
|
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1145 |
both sides.
|
1146 |
||
1147 |
It's better to consider the whole thing as a disagreement region.
|
|
1148 |
"""
|
|
1149 |
base = """\ |
|
1150 |
start context
|
|
1151 |
base line 1
|
|
1152 |
base line 2
|
|
1153 |
end context
|
|
1154 |
"""
|
|
1155 |
a = """\ |
|
1156 |
start context
|
|
1157 |
base line 1
|
|
1158 |
a's replacement line 2
|
|
1159 |
end context
|
|
1160 |
"""
|
|
1161 |
b = """\ |
|
1162 |
start context
|
|
1163 |
b replaces
|
|
1164 |
both lines
|
|
1165 |
end context
|
|
1166 |
"""
|
|
1167 |
result = """\ |
|
1168 |
start context
|
|
|
3943.8.2
by Marius Kruger
fix tests relying on trailing whitespace by replacing it with \x20. |
1169 |
<<<<<<<\x20 |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1170 |
base line 1
|
1171 |
a's replacement line 2
|
|
1172 |
=======
|
|
1173 |
b replaces
|
|
1174 |
both lines
|
|
|
3943.8.2
by Marius Kruger
fix tests relying on trailing whitespace by replacing it with \x20. |
1175 |
>>>>>>>\x20 |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1176 |
end context
|
1177 |
"""
|
|
1178 |
self._test_merge_from_strings(base, a, b, result) |
|
1179 |
||
1180 |
||
|
2535.3.1
by Andrew Bennetts
Add get_format_signature to VersionedFile |
1181 |
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin): |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1182 |
|
1183 |
def get_file(self, name='foo'): |
|
|
5609.9.4
by Vincent Ladeuil
Use self.get_transport instead of transport.get_transport where possible. |
1184 |
return WeaveFile(name, self.get_transport(), |
|
5273.1.7
by Vincent Ladeuil
No more use of the get_transport imported *symbol*, all uses are through |
1185 |
create=True) |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1186 |
|
1187 |
def log_contents(self, w): |
|
1188 |
self.log('weave is:') |
|
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
1189 |
tmpf = BytesIO() |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1190 |
write_weave(w, tmpf) |
1191 |
self.log(tmpf.getvalue()) |
|
1192 |
||
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1193 |
overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======', |
|
1664.2.9
by Aaron Bentley
Ported weave merge test to versionedfile |
1194 |
'xxx', '>>>>>>> ', 'bbb'] |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1195 |
|
1196 |
||
1197 |
class TestContentFactoryAdaption(TestCaseWithMemoryTransport): |
|
1198 |
||
1199 |
def test_select_adaptor(self): |
|
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1200 |
"""Test expected adapters exist.""" |
1201 |
# One scenario for each lookup combination we expect to use.
|
|
1202 |
# Each is source_kind, requested_kind, adapter class
|
|
1203 |
scenarios = [ |
|
1204 |
('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText), |
|
1205 |
('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText), |
|
1206 |
('knit-annotated-delta-gz', 'knit-delta-gz', |
|
1207 |
_mod_knit.DeltaAnnotatedToUnannotated), |
|
1208 |
('knit-annotated-delta-gz', 'fulltext', |
|
1209 |
_mod_knit.DeltaAnnotatedToFullText), |
|
1210 |
('knit-annotated-ft-gz', 'knit-ft-gz', |
|
1211 |
_mod_knit.FTAnnotatedToUnannotated), |
|
1212 |
('knit-annotated-ft-gz', 'fulltext', |
|
1213 |
_mod_knit.FTAnnotatedToFullText), |
|
1214 |
]
|
|
1215 |
for source, requested, klass in scenarios: |
|
1216 |
adapter_factory = versionedfile.adapter_registry.get( |
|
1217 |
(source, requested)) |
|
1218 |
adapter = adapter_factory(None) |
|
1219 |
self.assertIsInstance(adapter, klass) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1220 |
|
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1221 |
def get_knit(self, annotated=True): |
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1222 |
mapper = ConstantMapper('knit') |
1223 |
transport = self.get_transport() |
|
1224 |
return make_file_factory(annotated, mapper)(transport) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1225 |
|
1226 |
def helpGetBytes(self, f, ft_adapter, delta_adapter): |
|
|
3350.3.22
by Robert Collins
Review feedback. |
1227 |
"""Grab the interested adapted texts for tests.""" |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1228 |
# origin is a fulltext
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1229 |
entries = f.get_record_stream([('origin',)], 'unordered', False) |
|
6634.2.1
by Martin
Apply 2to3 next fixer and make compatible |
1230 |
base = next(entries) |
|
4005.3.1
by Robert Collins
Change the signature on VersionedFiles adapters to allow less typing and more flexability inside adapters. |
1231 |
ft_data = ft_adapter.get_bytes(base) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1232 |
# merged is both a delta and multiple parents.
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1233 |
entries = f.get_record_stream([('merged',)], 'unordered', False) |
|
6634.2.1
by Martin
Apply 2to3 next fixer and make compatible |
1234 |
merged = next(entries) |
|
4005.3.1
by Robert Collins
Change the signature on VersionedFiles adapters to allow less typing and more flexability inside adapters. |
1235 |
delta_data = delta_adapter.get_bytes(merged) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1236 |
return ft_data, delta_data |
1237 |
||
1238 |
def test_deannotation_noeol(self): |
|
1239 |
"""Test converting annotated knits to unannotated knits.""" |
|
1240 |
# we need a full text, and a delta
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1241 |
f = self.get_knit() |
1242 |
get_diamond_files(f, 1, trailing_eol=False) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1243 |
ft_data, delta_data = self.helpGetBytes(f, |
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1244 |
_mod_knit.FTAnnotatedToUnannotated(None), |
1245 |
_mod_knit.DeltaAnnotatedToUnannotated(None)) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1246 |
self.assertEqual( |
1247 |
'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n' |
|
1248 |
'origin\n' |
|
1249 |
'end origin\n', |
|
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
1250 |
GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read()) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1251 |
self.assertEqual( |
1252 |
'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n' |
|
1253 |
'1,2,3\nleft\nright\nmerged\nend merged\n', |
|
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
1254 |
GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read()) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1255 |
|
1256 |
def test_deannotation(self): |
|
1257 |
"""Test converting annotated knits to unannotated knits.""" |
|
1258 |
# we need a full text, and a delta
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1259 |
f = self.get_knit() |
1260 |
get_diamond_files(f, 1) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1261 |
ft_data, delta_data = self.helpGetBytes(f, |
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1262 |
_mod_knit.FTAnnotatedToUnannotated(None), |
1263 |
_mod_knit.DeltaAnnotatedToUnannotated(None)) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1264 |
self.assertEqual( |
1265 |
'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n' |
|
1266 |
'origin\n' |
|
1267 |
'end origin\n', |
|
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
1268 |
GzipFile(mode='rb', fileobj=BytesIO(ft_data)).read()) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1269 |
self.assertEqual( |
1270 |
'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n' |
|
1271 |
'2,2,2\nright\nmerged\nend merged\n', |
|
|
6621.22.2
by Martin
Use BytesIO or StringIO from bzrlib.sixish |
1272 |
GzipFile(mode='rb', fileobj=BytesIO(delta_data)).read()) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1273 |
|
1274 |
def test_annotated_to_fulltext_no_eol(self): |
|
1275 |
"""Test adapting annotated knits to full texts (for -> weaves).""" |
|
1276 |
# we need a full text, and a delta
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1277 |
f = self.get_knit() |
1278 |
get_diamond_files(f, 1, trailing_eol=False) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1279 |
# Reconstructing a full text requires a backing versioned file, and it
|
1280 |
# must have the base lines requested from it.
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1281 |
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1282 |
ft_data, delta_data = self.helpGetBytes(f, |
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1283 |
_mod_knit.FTAnnotatedToFullText(None), |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1284 |
_mod_knit.DeltaAnnotatedToFullText(logged_vf)) |
1285 |
self.assertEqual('origin', ft_data) |
|
1286 |
self.assertEqual('base\nleft\nright\nmerged', delta_data) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1287 |
self.assertEqual([('get_record_stream', [('left',)], 'unordered', |
1288 |
True)], logged_vf.calls) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1289 |
|
1290 |
def test_annotated_to_fulltext(self): |
|
1291 |
"""Test adapting annotated knits to full texts (for -> weaves).""" |
|
1292 |
# we need a full text, and a delta
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1293 |
f = self.get_knit() |
1294 |
get_diamond_files(f, 1) |
|
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1295 |
# Reconstructing a full text requires a backing versioned file, and it
|
1296 |
# must have the base lines requested from it.
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1297 |
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f) |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1298 |
ft_data, delta_data = self.helpGetBytes(f, |
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1299 |
_mod_knit.FTAnnotatedToFullText(None), |
|
3350.3.4
by Robert Collins
Finish adapters for annotated knits to unannotated knits and full texts. |
1300 |
_mod_knit.DeltaAnnotatedToFullText(logged_vf)) |
1301 |
self.assertEqual('origin\n', ft_data) |
|
1302 |
self.assertEqual('base\nleft\nright\nmerged\n', delta_data) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1303 |
self.assertEqual([('get_record_stream', [('left',)], 'unordered', |
1304 |
True)], logged_vf.calls) |
|
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1305 |
|
1306 |
def test_unannotated_to_fulltext(self): |
|
1307 |
"""Test adapting unannotated knits to full texts. |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1308 |
|
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1309 |
This is used for -> weaves, and for -> annotated knits.
|
1310 |
"""
|
|
1311 |
# we need a full text, and a delta
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1312 |
f = self.get_knit(annotated=False) |
1313 |
get_diamond_files(f, 1) |
|
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1314 |
# Reconstructing a full text requires a backing versioned file, and it
|
1315 |
# must have the base lines requested from it.
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1316 |
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f) |
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1317 |
ft_data, delta_data = self.helpGetBytes(f, |
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1318 |
_mod_knit.FTPlainToFullText(None), |
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1319 |
_mod_knit.DeltaPlainToFullText(logged_vf)) |
1320 |
self.assertEqual('origin\n', ft_data) |
|
1321 |
self.assertEqual('base\nleft\nright\nmerged\n', delta_data) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1322 |
self.assertEqual([('get_record_stream', [('left',)], 'unordered', |
1323 |
True)], logged_vf.calls) |
|
|
3350.3.5
by Robert Collins
Create adapters from plain compressed knit content. |
1324 |
|
|
3350.3.6
by Robert Collins
Test EOL behaviour of plain knit record adapters. |
1325 |
def test_unannotated_to_fulltext_no_eol(self): |
1326 |
"""Test adapting unannotated knits to full texts. |
|
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
1327 |
|
|
3350.3.6
by Robert Collins
Test EOL behaviour of plain knit record adapters. |
1328 |
This is used for -> weaves, and for -> annotated knits.
|
1329 |
"""
|
|
1330 |
# we need a full text, and a delta
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1331 |
f = self.get_knit(annotated=False) |
1332 |
get_diamond_files(f, 1, trailing_eol=False) |
|
|
3350.3.6
by Robert Collins
Test EOL behaviour of plain knit record adapters. |
1333 |
# Reconstructing a full text requires a backing versioned file, and it
|
1334 |
# must have the base lines requested from it.
|
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1335 |
logged_vf = versionedfile.RecordingVersionedFilesDecorator(f) |
|
3350.3.6
by Robert Collins
Test EOL behaviour of plain knit record adapters. |
1336 |
ft_data, delta_data = self.helpGetBytes(f, |
|
3350.3.7
by Robert Collins
Create a registry of versioned file record adapters. |
1337 |
_mod_knit.FTPlainToFullText(None), |
|
3350.3.6
by Robert Collins
Test EOL behaviour of plain knit record adapters. |
1338 |
_mod_knit.DeltaPlainToFullText(logged_vf)) |
1339 |
self.assertEqual('origin', ft_data) |
|
1340 |
self.assertEqual('base\nleft\nright\nmerged', delta_data) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1341 |
self.assertEqual([('get_record_stream', [('left',)], 'unordered', |
1342 |
True)], logged_vf.calls) |
|
|
3350.3.6
by Robert Collins
Test EOL behaviour of plain knit record adapters. |
1343 |
|
|
3350.6.1
by Robert Collins
* New ``versionedfile.KeyMapper`` interface to abstract out the access to |
1344 |
|
1345 |
class TestKeyMapper(TestCaseWithMemoryTransport): |
|
1346 |
"""Tests for various key mapping logic.""" |
|
1347 |
||
1348 |
def test_identity_mapper(self): |
|
1349 |
mapper = versionedfile.ConstantMapper("inventory") |
|
1350 |
self.assertEqual("inventory", mapper.map(('foo@ar',))) |
|
1351 |
self.assertEqual("inventory", mapper.map(('quux',))) |
|
1352 |
||
1353 |
def test_prefix_mapper(self): |
|
1354 |
#format5: plain
|
|
1355 |
mapper = versionedfile.PrefixMapper() |
|
1356 |
self.assertEqual("file-id", mapper.map(("file-id", "revision-id"))) |
|
1357 |
self.assertEqual("new-id", mapper.map(("new-id", "revision-id"))) |
|
1358 |
self.assertEqual(('file-id',), mapper.unmap("file-id")) |
|
1359 |
self.assertEqual(('new-id',), mapper.unmap("new-id")) |
|
1360 |
||
1361 |
def test_hash_prefix_mapper(self): |
|
1362 |
#format6: hash + plain
|
|
1363 |
mapper = versionedfile.HashPrefixMapper() |
|
1364 |
self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id"))) |
|
1365 |
self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id"))) |
|
1366 |
self.assertEqual(('file-id',), mapper.unmap("9b/file-id")) |
|
1367 |
self.assertEqual(('new-id',), mapper.unmap("45/new-id")) |
|
1368 |
||
1369 |
def test_hash_escaped_mapper(self): |
|
1370 |
#knit1: hash + escaped
|
|
1371 |
mapper = versionedfile.HashEscapedPrefixMapper() |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1372 |
self.assertEqual("88/%2520", mapper.map((" ", "revision-id"))) |
1373 |
self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id", |
|
1374 |
"revision-id"))) |
|
1375 |
self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id", |
|
1376 |
"revision-id"))) |
|
1377 |
self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d")) |
|
1378 |
self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d")) |
|
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
1379 |
|
1380 |
||
1381 |
class TestVersionedFiles(TestCaseWithMemoryTransport): |
|
1382 |
"""Tests for the multiple-file variant of VersionedFile.""" |
|
1383 |
||
|
5559.2.2
by Martin Pool
Change to using standard load_tests_apply_scenarios. |
1384 |
# We want to be sure of behaviour for:
|
1385 |
# weaves prefix layout (weave texts)
|
|
1386 |
# individually named weaves (weave inventories)
|
|
1387 |
# annotated knits - prefix|hash|hash-escape layout, we test the third only
|
|
1388 |
# as it is the most complex mapper.
|
|
1389 |
# individually named knits
|
|
1390 |
# individual no-graph knits in packs (signatures)
|
|
1391 |
# individual graph knits in packs (inventories)
|
|
1392 |
# individual graph nocompression knits in packs (revisions)
|
|
1393 |
# plain text knits in packs (texts)
|
|
1394 |
len_one_scenarios = [ |
|
1395 |
('weave-named', { |
|
1396 |
'cleanup':None, |
|
1397 |
'factory':make_versioned_files_factory(WeaveFile, |
|
1398 |
ConstantMapper('inventory')), |
|
1399 |
'graph':True, |
|
1400 |
'key_length':1, |
|
1401 |
'support_partial_insertion': False, |
|
1402 |
}),
|
|
1403 |
('named-knit', { |
|
1404 |
'cleanup':None, |
|
1405 |
'factory':make_file_factory(False, ConstantMapper('revisions')), |
|
1406 |
'graph':True, |
|
1407 |
'key_length':1, |
|
1408 |
'support_partial_insertion': False, |
|
1409 |
}),
|
|
1410 |
('named-nograph-nodelta-knit-pack', { |
|
1411 |
'cleanup':cleanup_pack_knit, |
|
1412 |
'factory':make_pack_factory(False, False, 1), |
|
1413 |
'graph':False, |
|
1414 |
'key_length':1, |
|
1415 |
'support_partial_insertion': False, |
|
1416 |
}),
|
|
1417 |
('named-graph-knit-pack', { |
|
1418 |
'cleanup':cleanup_pack_knit, |
|
1419 |
'factory':make_pack_factory(True, True, 1), |
|
1420 |
'graph':True, |
|
1421 |
'key_length':1, |
|
1422 |
'support_partial_insertion': True, |
|
1423 |
}),
|
|
1424 |
('named-graph-nodelta-knit-pack', { |
|
1425 |
'cleanup':cleanup_pack_knit, |
|
1426 |
'factory':make_pack_factory(True, False, 1), |
|
1427 |
'graph':True, |
|
1428 |
'key_length':1, |
|
1429 |
'support_partial_insertion': False, |
|
1430 |
}),
|
|
1431 |
('groupcompress-nograph', { |
|
1432 |
'cleanup':groupcompress.cleanup_pack_group, |
|
1433 |
'factory':groupcompress.make_pack_factory(False, False, 1), |
|
1434 |
'graph': False, |
|
1435 |
'key_length':1, |
|
1436 |
'support_partial_insertion':False, |
|
1437 |
}),
|
|
1438 |
]
|
|
1439 |
len_two_scenarios = [ |
|
1440 |
('weave-prefix', { |
|
1441 |
'cleanup':None, |
|
1442 |
'factory':make_versioned_files_factory(WeaveFile, |
|
1443 |
PrefixMapper()), |
|
1444 |
'graph':True, |
|
1445 |
'key_length':2, |
|
1446 |
'support_partial_insertion': False, |
|
1447 |
}),
|
|
1448 |
('annotated-knit-escape', { |
|
1449 |
'cleanup':None, |
|
1450 |
'factory':make_file_factory(True, HashEscapedPrefixMapper()), |
|
1451 |
'graph':True, |
|
1452 |
'key_length':2, |
|
1453 |
'support_partial_insertion': False, |
|
1454 |
}),
|
|
1455 |
('plain-knit-pack', { |
|
1456 |
'cleanup':cleanup_pack_knit, |
|
1457 |
'factory':make_pack_factory(True, True, 2), |
|
1458 |
'graph':True, |
|
1459 |
'key_length':2, |
|
1460 |
'support_partial_insertion': True, |
|
1461 |
}),
|
|
1462 |
('groupcompress', { |
|
1463 |
'cleanup':groupcompress.cleanup_pack_group, |
|
1464 |
'factory':groupcompress.make_pack_factory(True, False, 1), |
|
1465 |
'graph': True, |
|
1466 |
'key_length':1, |
|
1467 |
'support_partial_insertion':False, |
|
1468 |
}),
|
|
1469 |
]
|
|
1470 |
||
1471 |
scenarios = len_one_scenarios + len_two_scenarios |
|
1472 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1473 |
def get_versionedfiles(self, relpath='files'): |
1474 |
transport = self.get_transport(relpath) |
|
1475 |
if relpath != '.': |
|
1476 |
transport.mkdir('.') |
|
1477 |
files = self.factory(transport) |
|
1478 |
if self.cleanup is not None: |
|
|
4985.2.1
by Vincent Ladeuil
Deploy addAttrCleanup on the whole test suite. |
1479 |
self.addCleanup(self.cleanup, files) |
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1480 |
return files |
1481 |
||
|
4398.8.8
by John Arbash Meinel
Respond to Andrew's review comments. |
1482 |
def get_simple_key(self, suffix): |
1483 |
"""Return a key for the object under test.""" |
|
1484 |
if self.key_length == 1: |
|
1485 |
return (suffix,) |
|
1486 |
else: |
|
1487 |
return ('FileA',) + (suffix,) |
|
1488 |
||
|
5816.8.2
by Andrew Bennetts
Add test for without_fallbacks. |
1489 |
def test_add_fallback_implies_without_fallbacks(self): |
1490 |
f = self.get_versionedfiles('files') |
|
1491 |
if getattr(f, 'add_fallback_versioned_files', None) is None: |
|
1492 |
raise TestNotApplicable("%s doesn't support fallbacks" |
|
1493 |
% (f.__class__.__name__,)) |
|
1494 |
g = self.get_versionedfiles('fallback') |
|
1495 |
key_a = self.get_simple_key('a') |
|
1496 |
g.add_lines(key_a, [], ['\n']) |
|
1497 |
f.add_fallback_versioned_files(g) |
|
1498 |
self.assertTrue(key_a in f.get_parent_map([key_a])) |
|
1499 |
self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a])) |
|
1500 |
||
|
4398.8.1
by John Arbash Meinel
Add a VersionedFile.add_text() api. |
1501 |
def test_add_lines(self): |
1502 |
f = self.get_versionedfiles() |
|
|
4398.8.8
by John Arbash Meinel
Respond to Andrew's review comments. |
1503 |
key0 = self.get_simple_key('r0') |
1504 |
key1 = self.get_simple_key('r1') |
|
1505 |
key2 = self.get_simple_key('r2') |
|
1506 |
keyf = self.get_simple_key('foo') |
|
|
4398.8.1
by John Arbash Meinel
Add a VersionedFile.add_text() api. |
1507 |
f.add_lines(key0, [], ['a\n', 'b\n']) |
1508 |
if self.graph: |
|
1509 |
f.add_lines(key1, [key0], ['b\n', 'c\n']) |
|
1510 |
else: |
|
1511 |
f.add_lines(key1, [], ['b\n', 'c\n']) |
|
1512 |
keys = f.keys() |
|
1513 |
self.assertTrue(key0 in keys) |
|
1514 |
self.assertTrue(key1 in keys) |
|
1515 |
records = [] |
|
1516 |
for record in f.get_record_stream([key0, key1], 'unordered', True): |
|
1517 |
records.append((record.key, record.get_bytes_as('fulltext'))) |
|
1518 |
records.sort() |
|
1519 |
self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records) |
|
1520 |
||
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1521 |
def test__add_text(self): |
|
4398.8.1
by John Arbash Meinel
Add a VersionedFile.add_text() api. |
1522 |
f = self.get_versionedfiles() |
|
4398.8.8
by John Arbash Meinel
Respond to Andrew's review comments. |
1523 |
key0 = self.get_simple_key('r0') |
1524 |
key1 = self.get_simple_key('r1') |
|
1525 |
key2 = self.get_simple_key('r2') |
|
1526 |
keyf = self.get_simple_key('foo') |
|
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1527 |
f._add_text(key0, [], 'a\nb\n') |
|
4398.8.1
by John Arbash Meinel
Add a VersionedFile.add_text() api. |
1528 |
if self.graph: |
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1529 |
f._add_text(key1, [key0], 'b\nc\n') |
|
4398.8.1
by John Arbash Meinel
Add a VersionedFile.add_text() api. |
1530 |
else: |
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1531 |
f._add_text(key1, [], 'b\nc\n') |
|
4398.8.1
by John Arbash Meinel
Add a VersionedFile.add_text() api. |
1532 |
keys = f.keys() |
1533 |
self.assertTrue(key0 in keys) |
|
1534 |
self.assertTrue(key1 in keys) |
|
1535 |
records = [] |
|
1536 |
for record in f.get_record_stream([key0, key1], 'unordered', True): |
|
1537 |
records.append((record.key, record.get_bytes_as('fulltext'))) |
|
1538 |
records.sort() |
|
1539 |
self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records) |
|
1540 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1541 |
def test_annotate(self): |
1542 |
files = self.get_versionedfiles() |
|
1543 |
self.get_diamond_files(files) |
|
1544 |
if self.key_length == 1: |
|
1545 |
prefix = () |
|
1546 |
else: |
|
1547 |
prefix = ('FileA',) |
|
1548 |
# introduced full text
|
|
1549 |
origins = files.annotate(prefix + ('origin',)) |
|
1550 |
self.assertEqual([ |
|
1551 |
(prefix + ('origin',), 'origin\n')], |
|
1552 |
origins) |
|
1553 |
# a delta
|
|
1554 |
origins = files.annotate(prefix + ('base',)) |
|
1555 |
self.assertEqual([ |
|
1556 |
(prefix + ('base',), 'base\n')], |
|
1557 |
origins) |
|
1558 |
# a merge
|
|
1559 |
origins = files.annotate(prefix + ('merged',)) |
|
1560 |
if self.graph: |
|
1561 |
self.assertEqual([ |
|
1562 |
(prefix + ('base',), 'base\n'), |
|
1563 |
(prefix + ('left',), 'left\n'), |
|
1564 |
(prefix + ('right',), 'right\n'), |
|
1565 |
(prefix + ('merged',), 'merged\n') |
|
1566 |
],
|
|
1567 |
origins) |
|
1568 |
else: |
|
1569 |
# Without a graph everything is new.
|
|
1570 |
self.assertEqual([ |
|
1571 |
(prefix + ('merged',), 'base\n'), |
|
1572 |
(prefix + ('merged',), 'left\n'), |
|
1573 |
(prefix + ('merged',), 'right\n'), |
|
1574 |
(prefix + ('merged',), 'merged\n') |
|
1575 |
],
|
|
1576 |
origins) |
|
1577 |
self.assertRaises(RevisionNotPresent, |
|
1578 |
files.annotate, prefix + ('missing-key',)) |
|
1579 |
||
|
4332.3.26
by Robert Collins
Allow passing keys to check to VersionedFile.check(). |
1580 |
def test_check_no_parameters(self): |
1581 |
files = self.get_versionedfiles() |
|
1582 |
||
1583 |
def test_check_progressbar_parameter(self): |
|
1584 |
"""A progress bar can be supplied because check can be a generator.""" |
|
1585 |
pb = ui.ui_factory.nested_progress_bar() |
|
1586 |
self.addCleanup(pb.finished) |
|
1587 |
files = self.get_versionedfiles() |
|
1588 |
files.check(progress_bar=pb) |
|
1589 |
||
1590 |
def test_check_with_keys_becomes_generator(self): |
|
|
4454.3.65
by John Arbash Meinel
Tests that VF implementations support .get_annotator() |
1591 |
files = self.get_versionedfiles() |
1592 |
self.get_diamond_files(files) |
|
|
4332.3.26
by Robert Collins
Allow passing keys to check to VersionedFile.check(). |
1593 |
keys = files.keys() |
1594 |
entries = files.check(keys=keys) |
|
1595 |
seen = set() |
|
1596 |
# Texts output should be fulltexts.
|
|
1597 |
self.capture_stream(files, entries, seen.add, |
|
1598 |
files.get_parent_map(keys), require_fulltext=True) |
|
1599 |
# All texts should be output.
|
|
1600 |
self.assertEqual(set(keys), seen) |
|
|
4454.3.65
by John Arbash Meinel
Tests that VF implementations support .get_annotator() |
1601 |
|
|
4744.2.5
by John Arbash Meinel
Change to a generic 'VersionedFiles.clear_cache()' api. |
1602 |
def test_clear_cache(self): |
1603 |
files = self.get_versionedfiles() |
|
1604 |
files.clear_cache() |
|
1605 |
||
|
3350.6.2
by Robert Collins
Prepare parameterised test environment. |
1606 |
def test_construct(self): |
1607 |
"""Each parameterised test can be constructed on a transport.""" |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1608 |
files = self.get_versionedfiles() |
1609 |
||
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1610 |
def get_diamond_files(self, files, trailing_eol=True, left_only=False, |
1611 |
nokeys=False): |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1612 |
return get_diamond_files(files, self.key_length, |
1613 |
trailing_eol=trailing_eol, nograph=not self.graph, |
|
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1614 |
left_only=left_only, nokeys=nokeys) |
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1615 |
|
|
4398.8.4
by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles |
1616 |
def _add_content_nostoresha(self, add_lines): |
|
4119.1.1
by John Arbash Meinel
Move the 'add_lines_nostoresha' to being tested against all VF implementations. |
1617 |
"""When nostore_sha is supplied using old content raises.""" |
1618 |
vf = self.get_versionedfiles() |
|
1619 |
empty_text = ('a', []) |
|
1620 |
sample_text_nl = ('b', ["foo\n", "bar\n"]) |
|
1621 |
sample_text_no_nl = ('c', ["foo\n", "bar"]) |
|
1622 |
shas = [] |
|
1623 |
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl): |
|
|
4398.8.4
by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles |
1624 |
if add_lines: |
1625 |
sha, _, _ = vf.add_lines(self.get_simple_key(version), [], |
|
1626 |
lines) |
|
1627 |
else: |
|
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1628 |
sha, _, _ = vf._add_text(self.get_simple_key(version), [], |
1629 |
''.join(lines)) |
|
|
4119.1.1
by John Arbash Meinel
Move the 'add_lines_nostoresha' to being tested against all VF implementations. |
1630 |
shas.append(sha) |
1631 |
# we now have a copy of all the lines in the vf.
|
|
1632 |
for sha, (version, lines) in zip( |
|
1633 |
shas, (empty_text, sample_text_nl, sample_text_no_nl)): |
|
1634 |
new_key = self.get_simple_key(version + "2") |
|
1635 |
self.assertRaises(errors.ExistingContent, |
|
1636 |
vf.add_lines, new_key, [], lines, |
|
1637 |
nostore_sha=sha) |
|
|
4398.8.4
by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles |
1638 |
self.assertRaises(errors.ExistingContent, |
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1639 |
vf._add_text, new_key, [], ''.join(lines), |
|
4398.8.4
by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles |
1640 |
nostore_sha=sha) |
|
4119.1.1
by John Arbash Meinel
Move the 'add_lines_nostoresha' to being tested against all VF implementations. |
1641 |
# and no new version should have been added.
|
|
6634.2.1
by Martin
Apply 2to3 next fixer and make compatible |
1642 |
record = next(vf.get_record_stream([new_key], 'unordered', True)) |
|
4119.1.1
by John Arbash Meinel
Move the 'add_lines_nostoresha' to being tested against all VF implementations. |
1643 |
self.assertEqual('absent', record.storage_kind) |
1644 |
||
|
4398.8.4
by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles |
1645 |
def test_add_lines_nostoresha(self): |
1646 |
self._add_content_nostoresha(add_lines=True) |
|
1647 |
||
|
4398.8.6
by John Arbash Meinel
Switch the api from VF.add_text to VF._add_text and trim some extra 'features'. |
1648 |
def test__add_text_nostoresha(self): |
|
4398.8.4
by John Arbash Meinel
Implement add_text for GroupCompressVersionedFiles |
1649 |
self._add_content_nostoresha(add_lines=False) |
1650 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1651 |
def test_add_lines_return(self): |
1652 |
files = self.get_versionedfiles() |
|
1653 |
# save code by using the stock data insertion helper.
|
|
1654 |
adds = self.get_diamond_files(files) |
|
1655 |
results = [] |
|
1656 |
# We can only validate the first 2 elements returned from add_lines.
|
|
1657 |
for add in adds: |
|
1658 |
self.assertEqual(3, len(add)) |
|
1659 |
results.append(add[:2]) |
|
1660 |
if self.key_length == 1: |
|
1661 |
self.assertEqual([ |
|
1662 |
('00e364d235126be43292ab09cb4686cf703ddc17', 7), |
|
1663 |
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5), |
|
1664 |
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10), |
|
1665 |
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11), |
|
1666 |
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)], |
|
1667 |
results) |
|
1668 |
elif self.key_length == 2: |
|
1669 |
self.assertEqual([ |
|
1670 |
('00e364d235126be43292ab09cb4686cf703ddc17', 7), |
|
1671 |
('00e364d235126be43292ab09cb4686cf703ddc17', 7), |
|
1672 |
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5), |
|
1673 |
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5), |
|
1674 |
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10), |
|
1675 |
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10), |
|
1676 |
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11), |
|
1677 |
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11), |
|
1678 |
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23), |
|
1679 |
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)], |
|
1680 |
results) |
|
1681 |
||
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1682 |
def test_add_lines_no_key_generates_chk_key(self): |
1683 |
files = self.get_versionedfiles() |
|
1684 |
# save code by using the stock data insertion helper.
|
|
1685 |
adds = self.get_diamond_files(files, nokeys=True) |
|
1686 |
results = [] |
|
1687 |
# We can only validate the first 2 elements returned from add_lines.
|
|
1688 |
for add in adds: |
|
1689 |
self.assertEqual(3, len(add)) |
|
1690 |
results.append(add[:2]) |
|
1691 |
if self.key_length == 1: |
|
1692 |
self.assertEqual([ |
|
1693 |
('00e364d235126be43292ab09cb4686cf703ddc17', 7), |
|
1694 |
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5), |
|
1695 |
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10), |
|
1696 |
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11), |
|
1697 |
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)], |
|
1698 |
results) |
|
1699 |
# Check the added items got CHK keys.
|
|
|
6619.3.12
by Jelmer Vernooij
Use 2to3 set_literal fixer. |
1700 |
self.assertEqual({ |
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1701 |
('sha1:00e364d235126be43292ab09cb4686cf703ddc17',), |
1702 |
('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',), |
|
1703 |
('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',), |
|
1704 |
('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',), |
|
1705 |
('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',), |
|
|
6619.3.12
by Jelmer Vernooij
Use 2to3 set_literal fixer. |
1706 |
},
|
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1707 |
files.keys()) |
1708 |
elif self.key_length == 2: |
|
1709 |
self.assertEqual([ |
|
1710 |
('00e364d235126be43292ab09cb4686cf703ddc17', 7), |
|
1711 |
('00e364d235126be43292ab09cb4686cf703ddc17', 7), |
|
1712 |
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5), |
|
1713 |
('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5), |
|
1714 |
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10), |
|
1715 |
('a8478686da38e370e32e42e8a0c220e33ee9132f', 10), |
|
1716 |
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11), |
|
1717 |
('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11), |
|
1718 |
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23), |
|
1719 |
('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)], |
|
1720 |
results) |
|
1721 |
# Check the added items got CHK keys.
|
|
|
6619.3.12
by Jelmer Vernooij
Use 2to3 set_literal fixer. |
1722 |
self.assertEqual({ |
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1723 |
('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'), |
1724 |
('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'), |
|
1725 |
('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'), |
|
1726 |
('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'), |
|
1727 |
('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'), |
|
1728 |
('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'), |
|
1729 |
('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'), |
|
1730 |
('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'), |
|
1731 |
('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'), |
|
1732 |
('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'), |
|
|
6619.3.12
by Jelmer Vernooij
Use 2to3 set_literal fixer. |
1733 |
},
|
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1734 |
files.keys()) |
1735 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1736 |
def test_empty_lines(self): |
1737 |
"""Empty files can be stored.""" |
|
1738 |
f = self.get_versionedfiles() |
|
1739 |
key_a = self.get_simple_key('a') |
|
1740 |
f.add_lines(key_a, [], []) |
|
1741 |
self.assertEqual('', |
|
1742 |
f.get_record_stream([key_a], 'unordered', True |
|
1743 |
).next().get_bytes_as('fulltext')) |
|
1744 |
key_b = self.get_simple_key('b') |
|
1745 |
f.add_lines(key_b, self.get_parents([key_a]), []) |
|
1746 |
self.assertEqual('', |
|
1747 |
f.get_record_stream([key_b], 'unordered', True |
|
1748 |
).next().get_bytes_as('fulltext')) |
|
1749 |
||
1750 |
def test_newline_only(self): |
|
1751 |
f = self.get_versionedfiles() |
|
1752 |
key_a = self.get_simple_key('a') |
|
1753 |
f.add_lines(key_a, [], ['\n']) |
|
1754 |
self.assertEqual('\n', |
|
1755 |
f.get_record_stream([key_a], 'unordered', True |
|
1756 |
).next().get_bytes_as('fulltext')) |
|
1757 |
key_b = self.get_simple_key('b') |
|
1758 |
f.add_lines(key_b, self.get_parents([key_a]), ['\n']) |
|
1759 |
self.assertEqual('\n', |
|
1760 |
f.get_record_stream([key_b], 'unordered', True |
|
1761 |
).next().get_bytes_as('fulltext')) |
|
1762 |
||
|
4593.5.35
by John Arbash Meinel
Start working on a per-vf implementation test of find_ancestry. |
1763 |
def test_get_known_graph_ancestry(self): |
1764 |
f = self.get_versionedfiles() |
|
|
4593.5.36
by John Arbash Meinel
a few more implementations of the interface. |
1765 |
if not self.graph: |
1766 |
raise TestNotApplicable('ancestry info only relevant with graph.') |
|
|
4593.5.35
by John Arbash Meinel
Start working on a per-vf implementation test of find_ancestry. |
1767 |
key_a = self.get_simple_key('a') |
1768 |
key_b = self.get_simple_key('b') |
|
1769 |
key_c = self.get_simple_key('c') |
|
1770 |
# A
|
|
1771 |
# |\
|
|
1772 |
# | B
|
|
1773 |
# |/
|
|
1774 |
# C
|
|
1775 |
f.add_lines(key_a, [], ['\n']) |
|
1776 |
f.add_lines(key_b, [key_a], ['\n']) |
|
1777 |
f.add_lines(key_c, [key_a, key_b], ['\n']) |
|
1778 |
kg = f.get_known_graph_ancestry([key_c]) |
|
1779 |
self.assertIsInstance(kg, _mod_graph.KnownGraph) |
|
1780 |
self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort())) |
|
1781 |
||
|
4634.11.2
by John Arbash Meinel
Teach VF.get_known_graph_ancestry to go to fallbacks (bug #419241) |
1782 |
def test_known_graph_with_fallbacks(self): |
1783 |
f = self.get_versionedfiles('files') |
|
1784 |
if not self.graph: |
|
1785 |
raise TestNotApplicable('ancestry info only relevant with graph.') |
|
1786 |
if getattr(f, 'add_fallback_versioned_files', None) is None: |
|
1787 |
raise TestNotApplicable("%s doesn't support fallbacks" |
|
1788 |
% (f.__class__.__name__,)) |
|
1789 |
key_a = self.get_simple_key('a') |
|
1790 |
key_b = self.get_simple_key('b') |
|
1791 |
key_c = self.get_simple_key('c') |
|
1792 |
# A only in fallback
|
|
1793 |
# |\
|
|
1794 |
# | B
|
|
1795 |
# |/
|
|
1796 |
# C
|
|
1797 |
g = self.get_versionedfiles('fallback') |
|
1798 |
g.add_lines(key_a, [], ['\n']) |
|
1799 |
f.add_fallback_versioned_files(g) |
|
1800 |
f.add_lines(key_b, [key_a], ['\n']) |
|
1801 |
f.add_lines(key_c, [key_a, key_b], ['\n']) |
|
1802 |
kg = f.get_known_graph_ancestry([key_c]) |
|
1803 |
self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort())) |
|
1804 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1805 |
def test_get_record_stream_empty(self): |
1806 |
"""An empty stream can be requested without error.""" |
|
1807 |
f = self.get_versionedfiles() |
|
1808 |
entries = f.get_record_stream([], 'unordered', False) |
|
1809 |
self.assertEqual([], list(entries)) |
|
1810 |
||
1811 |
def assertValidStorageKind(self, storage_kind): |
|
1812 |
"""Assert that storage_kind is a valid storage_kind.""" |
|
1813 |
self.assertSubset([storage_kind], |
|
1814 |
['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta', |
|
|
3890.2.1
by John Arbash Meinel
Start working on a ChunkedContentFactory. |
1815 |
'knit-ft', 'knit-delta', 'chunked', 'fulltext', |
1816 |
'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz', |
|
|
4005.3.6
by Robert Collins
Support delta_closure=True with NetworkRecordStream to transmit deltas over the wire when full text extraction is required on the far end. |
1817 |
'knit-delta-gz', |
|
3735.32.12
by John Arbash Meinel
Add groupcompress-block[-ref] as valid stream types. |
1818 |
'knit-delta-closure', 'knit-delta-closure-ref', |
1819 |
'groupcompress-block', 'groupcompress-block-ref']) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1820 |
|
|
4332.3.26
by Robert Collins
Allow passing keys to check to VersionedFile.check(). |
1821 |
def capture_stream(self, f, entries, on_seen, parents, |
1822 |
require_fulltext=False): |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1823 |
"""Capture a stream for testing.""" |
1824 |
for factory in entries: |
|
1825 |
on_seen(factory.key) |
|
1826 |
self.assertValidStorageKind(factory.storage_kind) |
|
|
4241.4.1
by Ian Clatworthy
add sha generation support to versionedfiles |
1827 |
if factory.sha1 is not None: |
1828 |
self.assertEqual(f.get_sha1s([factory.key])[factory.key], |
|
1829 |
factory.sha1) |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1830 |
self.assertEqual(parents[factory.key], factory.parents) |
1831 |
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind), |
|
1832 |
str) |
|
|
4332.3.26
by Robert Collins
Allow passing keys to check to VersionedFile.check(). |
1833 |
if require_fulltext: |
1834 |
factory.get_bytes_as('fulltext') |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1835 |
|
1836 |
def test_get_record_stream_interface(self): |
|
1837 |
"""each item in a stream has to provide a regular interface.""" |
|
1838 |
files = self.get_versionedfiles() |
|
1839 |
self.get_diamond_files(files) |
|
1840 |
keys, _ = self.get_keys_and_sort_order() |
|
1841 |
parent_map = files.get_parent_map(keys) |
|
1842 |
entries = files.get_record_stream(keys, 'unordered', False) |
|
1843 |
seen = set() |
|
1844 |
self.capture_stream(files, entries, seen.add, parent_map) |
|
1845 |
self.assertEqual(set(keys), seen) |
|
1846 |
||
1847 |
def get_keys_and_sort_order(self): |
|
1848 |
"""Get diamond test keys list, and their sort ordering.""" |
|
1849 |
if self.key_length == 1: |
|
1850 |
keys = [('merged',), ('left',), ('right',), ('base',)] |
|
1851 |
sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0} |
|
1852 |
else: |
|
1853 |
keys = [ |
|
1854 |
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'), |
|
1855 |
('FileA', 'base'), |
|
1856 |
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'), |
|
1857 |
('FileB', 'base'), |
|
1858 |
]
|
|
1859 |
sort_order = { |
|
1860 |
('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1, |
|
1861 |
('FileA', 'base'):0, |
|
1862 |
('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1, |
|
1863 |
('FileB', 'base'):0, |
|
1864 |
}
|
|
1865 |
return keys, sort_order |
|
1866 |
||
|
4111.1.1
by Robert Collins
Add a groupcompress sort order. |
1867 |
def get_keys_and_groupcompress_sort_order(self): |
1868 |
"""Get diamond test keys list, and their groupcompress sort ordering.""" |
|
1869 |
if self.key_length == 1: |
|
1870 |
keys = [('merged',), ('left',), ('right',), ('base',)] |
|
1871 |
sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2} |
|
1872 |
else: |
|
1873 |
keys = [ |
|
1874 |
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'), |
|
1875 |
('FileA', 'base'), |
|
1876 |
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'), |
|
1877 |
('FileB', 'base'), |
|
1878 |
]
|
|
1879 |
sort_order = { |
|
1880 |
('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1, |
|
1881 |
('FileA', 'base'):2, |
|
1882 |
('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4, |
|
1883 |
('FileB', 'base'):5, |
|
1884 |
}
|
|
1885 |
return keys, sort_order |
|
1886 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1887 |
def test_get_record_stream_interface_ordered(self): |
1888 |
"""each item in a stream has to provide a regular interface.""" |
|
1889 |
files = self.get_versionedfiles() |
|
1890 |
self.get_diamond_files(files) |
|
1891 |
keys, sort_order = self.get_keys_and_sort_order() |
|
1892 |
parent_map = files.get_parent_map(keys) |
|
1893 |
entries = files.get_record_stream(keys, 'topological', False) |
|
1894 |
seen = [] |
|
1895 |
self.capture_stream(files, entries, seen.append, parent_map) |
|
1896 |
self.assertStreamOrder(sort_order, seen, keys) |
|
1897 |
||
1898 |
def test_get_record_stream_interface_ordered_with_delta_closure(self):
    """each item must be accessible as a fulltext."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    keys, sort_order = self.get_keys_and_sort_order()
    parent_map = files.get_parent_map(keys)
    # include_delta_closure=True: every record must be convertible to a
    # full text without consulting the store again.
    entries = files.get_record_stream(keys, 'topological', True)
    seen = []
    for factory in entries:
        seen.append(factory.key)
        self.assertValidStorageKind(factory.storage_kind)
        # sha1 may be None (lazily computed) or must match the stored sha.
        self.assertSubset([factory.sha1],
            [None, files.get_sha1s([factory.key])[factory.key]])
        self.assertEqual(parent_map[factory.key], factory.parents)
        ft_bytes = factory.get_bytes_as('fulltext')
        self.assertIsInstance(ft_bytes, str)
        # The chunked representation must join back to the fulltext.
        chunked_bytes = factory.get_bytes_as('chunked')
        self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
    self.assertStreamOrder(sort_order, seen, keys)
1919 |
||
|
4111.1.1
by Robert Collins
Add a groupcompress sort order. |
1920 |
def test_get_record_stream_interface_groupcompress(self):
    """each item in a stream has to provide a regular interface."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    # Groupcompress order is newest-first; see
    # get_keys_and_groupcompress_sort_order for the expected bands.
    keys, sort_order = self.get_keys_and_groupcompress_sort_order()
    parent_map = files.get_parent_map(keys)
    entries = files.get_record_stream(keys, 'groupcompress', False)
    seen = []
    self.capture_stream(files, entries, seen.append, parent_map)
    self.assertStreamOrder(sort_order, seen, keys)
|
1930 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
1931 |
def assertStreamOrder(self, sort_order, seen, keys):
    """Assert that the keys in seen respect sort_order.

    :param sort_order: A dict mapping key -> ordering band; within one
        prefix, bands must be non-decreasing in emission order.
    :param seen: The keys in the order the stream emitted them.
    :param keys: The full set of keys that was requested.
    """
    self.assertEqual(len(set(seen)), len(keys))
    # Track the lowest acceptable band per key prefix (per file).
    if self.key_length == 1:
        lows = {():0}
    else:
        lows = {('FileA',):0, ('FileB',):0}
    if not self.graph:
        # Without a graph there is no meaningful order; only membership.
        self.assertEqual(set(keys), set(seen))
    else:
        for key in seen:
            sort_pos = sort_order[key]
            # Each key must not sort earlier than any key already seen
            # for the same prefix.
            self.assertTrue(sort_pos >= lows[key[:-1]],
                "Out of order in sorted stream: %r, %r" % (key, seen))
            lows[key[:-1]] = sort_pos
|
1945 |
||
1946 |
def test_get_record_stream_unknown_storage_kind_raises(self):
    """Asking for a storage kind that the stream cannot supply raises."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    if self.key_length == 1:
        keys = [('merged',), ('left',), ('right',), ('base',)]
    else:
        keys = [
            ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
            ('FileA', 'base'),
            ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
            ('FileB', 'base'),
            ]
    parent_map = files.get_parent_map(keys)
    entries = files.get_record_stream(keys, 'unordered', False)
    # We track the contents because we should be able to try, fail a
    # particular kind and then ask for one that works and continue.
    seen = set()
    for factory in entries:
        seen.add(factory.key)
        self.assertValidStorageKind(factory.storage_kind)
        if factory.sha1 is not None:
            self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                factory.sha1)
        self.assertEqual(parent_map[factory.key], factory.parents)
        # currently no stream emits mpdiff, so requesting it must raise
        # without poisoning the factory for later valid requests.
        self.assertRaises(errors.UnavailableRepresentation,
            factory.get_bytes_as, 'mpdiff')
        self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
            str)
    self.assertEqual(set(keys), seen)
|
1977 |
||
1978 |
def test_get_record_stream_missing_records_are_absent(self):
    """Requesting missing keys yields 'absent' factories, in any order."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    # Mix present diamond keys with keys that were never added.
    if self.key_length == 1:
        keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
    else:
        keys = [
            ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
            ('FileA', 'absent'), ('FileA', 'base'),
            ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
            ('FileB', 'absent'), ('FileB', 'base'),
            ('absent', 'absent'),
            ]
    parent_map = files.get_parent_map(keys)
    # Absent handling must work for both orderings.
    entries = files.get_record_stream(keys, 'unordered', False)
    self.assertAbsentRecord(files, keys, parent_map, entries)
    entries = files.get_record_stream(keys, 'topological', False)
    self.assertAbsentRecord(files, keys, parent_map, entries)
|
1996 |
||
|
4005.3.3
by Robert Collins
Test NetworkRecordStream with delta'd texts. |
1997 |
def assertRecordHasContent(self, record, bytes):
    """Assert that record has the bytes bytes."""
    # Both the fulltext and the joined chunked representation must
    # reproduce the expected content exactly.
    fulltext = record.get_bytes_as('fulltext')
    self.assertEqual(bytes, fulltext)
    chunks = record.get_bytes_as('chunked')
    self.assertEqual(bytes, ''.join(chunks))
|
4005.3.3
by Robert Collins
Test NetworkRecordStream with delta'd texts. |
2001 |
|
|
4005.3.2
by Robert Collins
First passing NetworkRecordStream test - a fulltext from any record type which isn't a chunked or fulltext can be serialised and deserialised successfully. |
2002 |
def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
    """A single fulltext record round-trips through NetworkRecordStream."""
    files = self.get_versionedfiles()
    key = self.get_simple_key('foo')
    files.add_lines(key, (), ['my text\n', 'content'])
    stream = files.get_record_stream([key], 'unordered', False)
    record = next(stream)
    if record.storage_kind in ('chunked', 'fulltext'):
        # chunked and fulltext representations are for direct use not wire
        # serialisation: check they are able to be used directly. To send
        # such records over the wire translation will be needed.
        self.assertRecordHasContent(record, "my text\ncontent")
    else:
        # Serialise the native bytes and deserialise them again; all
        # metadata and content must survive the round trip.
        bytes = [record.get_bytes_as(record.storage_kind)]
        network_stream = versionedfile.NetworkRecordStream(bytes).read()
        source_record = record
        records = []
        for record in network_stream:
            records.append(record)
            self.assertEqual(source_record.storage_kind,
                record.storage_kind)
            self.assertEqual(source_record.parents, record.parents)
            self.assertEqual(
                source_record.get_bytes_as(source_record.storage_kind),
                record.get_bytes_as(record.storage_kind))
        self.assertEqual(1, len(records))
|
2027 |
||
|
4005.3.5
by Robert Collins
Interface level test for using delta_closure=True over the network. |
2028 |
def assertStreamMetaEqual(self, records, expected, stream):
    """Assert that streams expected and stream have the same records.

    This is a generator: assertions only run as the caller consumes the
    returned iterator.

    :param records: A list to collect the seen records.
    :param expected: The reference record stream to compare against.
    :param stream: The stream under test.
    :return: A generator of the records in stream.
    """
    # We make assertions during copying to catch things early for easier
    # debugging. This must use the iterating zip() from the future.
    for record, ref_record in zip(stream, expected):
        records.append(record)
        self.assertEqual(ref_record.key, record.key)
        self.assertEqual(ref_record.storage_kind, record.storage_kind)
        self.assertEqual(ref_record.parents, record.parents)
        yield record
|
2042 |
||
2043 |
def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
    stream):
    """Convert a stream to a bytes iterator.

    Records already in a direct-use representation ('chunked'/'fulltext')
    cannot be wire-serialised as-is; they are counted and checked instead
    of being yielded.

    :param skipped_records: A list with one element to increment when a
        record is skipped.
    :param full_texts: A dict from key->fulltext representation, for
        checking chunked or fulltext stored records.
    :param stream: A record_stream.
    :return: An iterator over the bytes of each record.
    """
    for record in stream:
        if record.storage_kind in ('chunked', 'fulltext'):
            skipped_records[0] += 1
            # check the content is correct for direct use.
            self.assertRecordHasContent(record, full_texts[record.key])
        else:
            yield record.get_bytes_as(record.storage_kind)
|
2061 |
||
|
4005.3.3
by Robert Collins
Test NetworkRecordStream with delta'd texts. |
2062 |
def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
    """A fulltext plus a delta survive wire serialisation together."""
    files = self.get_versionedfiles()
    target_files = self.get_versionedfiles('target')
    key = self.get_simple_key('ft')
    key_delta = self.get_simple_key('delta')
    files.add_lines(key, (), ['my text\n', 'content'])
    if self.graph:
        delta_parents = (key,)
    else:
        delta_parents = ()
    files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
    # Two identical streams: 'local' is serialised, 'ref' is the
    # comparison reference for the deserialised stream.
    local = files.get_record_stream([key, key_delta], 'unordered', False)
    ref = files.get_record_stream([key, key_delta], 'unordered', False)
    skipped_records = [0]
    full_texts = {
        key: "my text\ncontent",
        key_delta: "different\ncontent\n",
        }
    byte_stream = self.stream_to_bytes_or_skip_counter(
        skipped_records, full_texts, local)
    network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
    records = []
    # insert the stream from the network into a versioned files object so we can
    # check the content was carried across correctly without doing delta
    # inspection.
    target_files.insert_record_stream(
        self.assertStreamMetaEqual(records, ref, network_stream))
    # No duplicates on the wire thank you!
    self.assertEqual(2, len(records) + skipped_records[0])
    if len(records):
        # if any content was copied it all must have all been.
        self.assertIdenticalVersionedFile(files, target_files)
|
2094 |
||
2095 |
def test_get_record_stream_native_formats_are_wire_ready_delta(self):
    # copy a delta over the wire
    files = self.get_versionedfiles()
    target_files = self.get_versionedfiles('target')
    key = self.get_simple_key('ft')
    key_delta = self.get_simple_key('delta')
    files.add_lines(key, (), ['my text\n', 'content'])
    if self.graph:
        delta_parents = (key,)
    else:
        delta_parents = ()
    files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
    # Copy the basis text across so we can reconstruct the delta during
    # insertion into target.
    target_files.insert_record_stream(files.get_record_stream([key],
        'unordered', False))
    # Only the delta record goes over the "wire" below.
    local = files.get_record_stream([key_delta], 'unordered', False)
    ref = files.get_record_stream([key_delta], 'unordered', False)
    skipped_records = [0]
    full_texts = {
        key_delta: "different\ncontent\n",
        }
    byte_stream = self.stream_to_bytes_or_skip_counter(
        skipped_records, full_texts, local)
    network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
    records = []
    # insert the stream from the network into a versioned files object so we can
    # check the content was carried across correctly without doing delta
    # inspection during check_stream.
    target_files.insert_record_stream(
        self.assertStreamMetaEqual(records, ref, network_stream))
    # No duplicates on the wire thank you!
    self.assertEqual(1, len(records) + skipped_records[0])
    if len(records):
        # if any content was copied it all must have all been
        self.assertIdenticalVersionedFile(files, target_files)
2131 |
||
|
4005.3.5
by Robert Collins
Interface level test for using delta_closure=True over the network. |
2132 |
def test_get_record_stream_wire_ready_delta_closure_included(self):
    # copy a delta over the wire with the ability to get its full text.
    files = self.get_versionedfiles()
    key = self.get_simple_key('ft')
    key_delta = self.get_simple_key('delta')
    files.add_lines(key, (), ['my text\n', 'content'])
    if self.graph:
        delta_parents = (key,)
    else:
        delta_parents = ()
    files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
    # include_delta_closure=True: the stream must be self-contained.
    local = files.get_record_stream([key_delta], 'unordered', True)
    ref = files.get_record_stream([key_delta], 'unordered', True)
    skipped_records = [0]
    full_texts = {
        key_delta: "different\ncontent\n",
        }
    byte_stream = self.stream_to_bytes_or_skip_counter(
        skipped_records, full_texts, local)
    network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
    records = []
    # insert the stream from the network into a versioned files object so we can
    # check the content was carried across correctly without doing delta
    # inspection during check_stream.
    for record in self.assertStreamMetaEqual(records, ref, network_stream):
        # we have to be able to get the full text out:
        self.assertRecordHasContent(record, full_texts[record.key])
    # No duplicates on the wire thank you!
    self.assertEqual(1, len(records) + skipped_records[0])
|
2161 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
2162 |
def assertAbsentRecord(self, files, keys, parents, entries):
    """Helper for test_get_record_stream_missing_records_are_absent.

    Consumes entries, checking that missing keys come back as 'absent'
    factories (no sha1, no parents) and that present keys expose the
    normal record interface. Relies on the test convention that missing
    keys end in 'absent'.
    """
    seen = set()
    for factory in entries:
        seen.add(factory.key)
        if factory.key[-1] == 'absent':
            self.assertEqual('absent', factory.storage_kind)
            self.assertEqual(None, factory.sha1)
            self.assertEqual(None, factory.parents)
        else:
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                sha1 = files.get_sha1s([factory.key])[factory.key]
                self.assertEqual(sha1, factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
    # Every requested key, present or absent, must have been yielded.
    self.assertEqual(set(keys), seen)
|
2180 |
||
2181 |
def test_filter_absent_records(self):
    """Requested missing records can be filter trivially."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    keys, _ = self.get_keys_and_sort_order()
    parent_map = files.get_parent_map(keys)
    # Add an absent record in the middle of the present keys. (We don't ask
    # for just absent keys to ensure that content before and after the
    # absent keys is still delivered).
    present_keys = list(keys)
    if self.key_length == 1:
        keys.insert(2, ('extra',))
    else:
        keys.insert(2, ('extra', 'extra'))
    entries = files.get_record_stream(keys, 'unordered', False)
    seen = set()
    # filter_absent must drop the 'extra' record and pass the rest through.
    self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
        parent_map)
    self.assertEqual(set(present_keys), seen)
|
2200 |
||
2201 |
def get_mapper(self):
    """Get a mapper suitable for the key length of the test interface."""
    # Prefixed (2-element) keys need a prefix-aware mapper; simple keys
    # all map to a single constant location.
    if self.key_length != 1:
        return HashEscapedPrefixMapper()
    return ConstantMapper('source')
|
2207 |
||
2208 |
def get_parents(self, parents):
    """Get parents, taking self.graph into consideration.

    :return: parents unchanged when the store records a graph, else None.
    """
    return parents if self.graph else None
|
2214 |
||
|
4332.3.32
by Robert Collins
Merge bzr.dev. |
2215 |
def test_get_annotator(self):
    """get_annotator().annotate attributes each line to its origin key."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    origin_key = self.get_simple_key('origin')
    base_key = self.get_simple_key('base')
    left_key = self.get_simple_key('left')
    right_key = self.get_simple_key('right')
    merged_key = self.get_simple_key('merged')
    # introduced full text
    origins, lines = files.get_annotator().annotate(origin_key)
    self.assertEqual([(origin_key,)], origins)
    self.assertEqual(['origin\n'], lines)
    # a delta
    origins, lines = files.get_annotator().annotate(base_key)
    self.assertEqual([(base_key,)], origins)
    # a merge
    origins, lines = files.get_annotator().annotate(merged_key)
    if self.graph:
        # With a graph, each line is traced back to the revision that
        # introduced it.
        self.assertEqual([
            (base_key,),
            (left_key,),
            (right_key,),
            (merged_key,),
            ], origins)
    else:
        # Without a graph everything is new.
        self.assertEqual([
            (merged_key,),
            (merged_key,),
            (merged_key,),
            (merged_key,),
            ], origins)
    self.assertRaises(RevisionNotPresent,
        files.get_annotator().annotate, self.get_simple_key('missing-key'))
|
2250 |
||
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
2251 |
def test_get_parent_map(self):
    """get_parent_map answers for single keys, many keys, and absent keys."""
    files = self.get_versionedfiles()
    if self.key_length == 1:
        parent_details = [
            (('r0',), self.get_parents(())),
            (('r1',), self.get_parents((('r0',),))),
            (('r2',), self.get_parents(())),
            (('r3',), self.get_parents(())),
            (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
            ]
    else:
        parent_details = [
            (('FileA', 'r0'), self.get_parents(())),
            (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
            (('FileA', 'r2'), self.get_parents(())),
            (('FileA', 'r3'), self.get_parents(())),
            (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
            ]
    for key, parents in parent_details:
        files.add_lines(key, parents, [])
        # immediately after adding it should be queryable.
        self.assertEqual({key:parents}, files.get_parent_map([key]))
    # We can ask for an empty set
    self.assertEqual({}, files.get_parent_map([]))
    # We can ask for many keys
    all_parents = dict(parent_details)
    self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
    # Absent keys are just not included in the result. Use list() rather
    # than dict.keys(): on Python 3 keys() is a view with no insert().
    keys = list(all_parents)
    if self.key_length == 1:
        keys.insert(1, ('missing',))
    else:
        keys.insert(1, ('missing', 'missing'))
    # Absent keys are just ignored
    self.assertEqual(all_parents, files.get_parent_map(keys))
|
2287 |
||
2288 |
def test_get_sha1s(self):
    """get_sha1s returns the known sha1 for every requested key."""
    files = self.get_versionedfiles()
    self.get_diamond_files(files)
    if self.key_length == 1:
        keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
    else:
        # ask for shas from different prefixes.
        keys = [
            ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
            ('FileA', 'merged'), ('FileB', 'right'),
            ]
    # Expected sha1s of the diamond file contents, fixed by
    # get_diamond_files; position i corresponds to keys[i].
    self.assertEqual({
        keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
        keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
        keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
        keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
        keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
        },
        files.get_sha1s(keys))
|
3943.8.1
by Marius Kruger
remove all trailing whitespace from bzr source |
2307 |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
2308 |
def test_insert_record_stream_empty(self):
    """Inserting an empty record stream should work."""
    # An empty iterable must be accepted without error.
    self.get_versionedfiles().insert_record_stream([])
|
2312 |
||
2313 |
def assertIdenticalVersionedFile(self, expected, actual):
    """Assert that left and right have the same contents.

    Checks key sets, parent maps (or None parents when graph-less), and
    the fulltext of every key.
    """
    self.assertEqual(set(actual.keys()), set(expected.keys()))
    actual_parents = actual.get_parent_map(actual.keys())
    if self.graph:
        self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
    else:
        # Graph-less stores report no parents at all.
        for key, parents in actual_parents.items():
            self.assertEqual(None, parents)
    for key in actual.keys():
        # Use the next() builtin rather than the Python-2-only .next()
        # method, consistent with the rest of this module.
        actual_text = next(actual.get_record_stream(
            [key], 'unordered', True)).get_bytes_as('fulltext')
        expected_text = next(expected.get_record_stream(
            [key], 'unordered', True)).get_bytes_as('fulltext')
        self.assertEqual(actual_text, expected_text)
|
2328 |
||
2329 |
def test_insert_record_stream_fulltexts(self):
    """Any file should accept a stream of fulltexts."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # weaves always output fulltexts.
    source = make_versioned_files_factory(WeaveFile, mapper)(
        source_transport)
    # Use the default trailing-eol content here; the _noeol variant below
    # covers trailing_eol=False (previously both tests were identical).
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2343 |
||
2344 |
def test_insert_record_stream_fulltexts_noeol(self):
    """Any file should accept a stream of fulltexts."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # weaves always output fulltexts.
    source = make_versioned_files_factory(WeaveFile, mapper)(
        source_transport)
    # trailing_eol=False: texts whose last line has no newline.
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2358 |
||
2359 |
def test_insert_record_stream_annotated_knits(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # make_file_factory(True, ...): annotated knit source.
    source = make_file_factory(True, mapper)(source_transport)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2371 |
||
2372 |
def test_insert_record_stream_annotated_knits_noeol(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # Annotated knit source, texts without trailing newline.
    source = make_file_factory(True, mapper)(source_transport)
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2384 |
||
2385 |
def test_insert_record_stream_plain_knits(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # make_file_factory(False, ...): un-annotated (plain) knit source.
    source = make_file_factory(False, mapper)(source_transport)
    self.get_diamond_files(source)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2397 |
||
2398 |
def test_insert_record_stream_plain_knits_noeol(self):
    """Any file should accept a stream from plain knits."""
    files = self.get_versionedfiles()
    mapper = self.get_mapper()
    source_transport = self.get_transport('source')
    source_transport.mkdir('.')
    # Plain knit source, texts without trailing newline.
    source = make_file_factory(False, mapper)(source_transport)
    self.get_diamond_files(source, trailing_eol=False)
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2410 |
||
2411 |
def test_insert_record_stream_existing_keys(self):
    """Inserting keys already in a file should not error."""
    files = self.get_versionedfiles()
    source = self.get_versionedfiles('source')
    self.get_diamond_files(source)
    # insert some keys into f.
    self.get_diamond_files(files, left_only=True)
    # Re-inserting the full diamond overlaps the left-only keys already
    # present in files.
    stream = source.get_record_stream(source.keys(), 'topological',
        False)
    files.insert_record_stream(stream)
    self.assertIdenticalVersionedFile(source, files)
|
2422 |
||
2423 |
def test_insert_record_stream_missing_keys(self):
    """Inserting a stream with absent keys should raise an error."""
    files = self.get_versionedfiles()
    source = self.get_versionedfiles('source')
    # 'source' is empty, so the requested key comes back absent.
    stream = source.get_record_stream([('missing',) * self.key_length],
        'topological', False)
    self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
        stream)
|
2431 |
||
2432 |
def test_insert_record_stream_out_of_order(self):
    """An out of order stream can either error or work."""
    files = self.get_versionedfiles()
    source = self.get_versionedfiles('source')
    self.get_diamond_files(source)
    # Build a stream where children ('merged', 'left') arrive before
    # their ancestors ('right', 'base').
    if self.key_length == 1:
        origin_keys = [('origin',)]
        end_keys = [('merged',), ('left',)]
        start_keys = [('right',), ('base',)]
    else:
        origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
        end_keys = [('FileA', 'merged',), ('FileA', 'left',),
            ('FileB', 'merged',), ('FileB', 'left',)]
        start_keys = [('FileA', 'right',), ('FileA', 'base',),
            ('FileB', 'right',), ('FileB', 'base',)]
    origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
    end_entries = source.get_record_stream(end_keys, 'topological', False)
    start_entries = source.get_record_stream(start_keys, 'topological', False)
    entries = itertools.chain(origin_entries, end_entries, start_entries)
    try:
        files.insert_record_stream(entries)
    except RevisionNotPresent:
        # Must not have corrupted the file.
        files.check()
    else:
        # If the implementation tolerates out-of-order insertion, the
        # result must be complete and identical to the source.
        self.assertIdenticalVersionedFile(source, files)
|
2458 |
||
|
4634.84.2
by Andrew Bennetts
Add test. |
2459 |
def test_insert_record_stream_long_parent_chain_out_of_order(self):
    """An out of order stream can either error or work."""
    if not self.graph:
        raise TestNotApplicable('ancestry info only relevant with graph.')
    # Build a chain of 26 records, each one based on its predecessor,
    # long enough that most of them will be stored as deltas.
    source = self.get_versionedfiles('source')
    parents = ()
    keys = []
    content = [('same same %d\n' % n) for n in range(500)]
    for letter in 'abcdefghijklmnopqrstuvwxyz':
        key = ('key-' + letter,)
        if self.key_length == 2:
            key = ('prefix',) + key
        content.append('content for ' + letter + '\n')
        source.add_lines(key, parents, content)
        keys.append(key)
        parents = (key,)
    # Stream the records newest-first, leaving out the oldest record
    # that the rest ultimately depend upon, and insert that stream into
    # a fresh versionedfiles.
    streams = [source.get_record_stream([key], 'unordered', False)
               for key in reversed(keys)]
    deltas = itertools.chain.from_iterable(streams[:-1])
    files = self.get_versionedfiles()
    try:
        files.insert_record_stream(deltas)
    except RevisionNotPresent:
        # Must not have corrupted the file.
        files.check()
    else:
        # Only the omitted first key may be reported as a missing
        # compression parent (or no key at all for no-delta scenarios).
        missing = set(files.get_missing_compression_parent_keys())
        missing.discard(keys[0])
        self.assertEqual(set(), missing)
|
2495 |
||
|
4009.3.2
by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later. |
2496 |
def get_knit_delta_source(self):
    """Get a source that can produce a stream with knit delta records,
    regardless of this test's scenario.
    """
    mapper = self.get_mapper()
    transport = self.get_transport('source')
    transport.mkdir('.')
    # A plain knit store (annotate=False) always produces delta records.
    source = make_file_factory(False, mapper)(transport)
    get_diamond_files(
        source, self.key_length, trailing_eol=True, nograph=False,
        left_only=False)
    return source
2507 |
||
2508 |
def test_insert_record_stream_delta_missing_basis_no_corruption(self):
    """Insertion where a needed basis is not included notifies the caller
    of the missing basis.  In the meantime a record missing its basis is
    not added.
    """
    source = self.get_knit_delta_source()
    keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
    entries = source.get_record_stream(keys, 'unordered', False)
    files = self.get_versionedfiles()
    if not self.support_partial_insertion:
        # Implementations without partial insertion must refuse the
        # stream outright...
        self.assertRaises(
            errors.RevisionNotPresent, files.insert_record_stream, entries)
        # ...without corrupting the store.
        files.check()
    else:
        # Nothing is missing before the insertion.
        self.assertEqual(
            [], list(files.get_missing_compression_parent_keys()))
        files.insert_record_stream(entries)
        # The delta basis 'left' is now reported missing, yet both
        # inserted keys are visible through the parent map.
        self.assertEqual({self.get_simple_key('left')},
            set(files.get_missing_compression_parent_keys()))
        self.assertEqual(set(keys), set(files.get_parent_map(keys)))
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
2529 |
|
|
4009.3.2
by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later. |
2530 |
def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
    """Insertion where a needed basis is not included notifies the caller
    of the missing basis.  That basis can be added in a second
    insert_record_stream call that does not need to repeat records present
    in the previous stream.  The record(s) that required that basis are
    fully inserted once their basis is no longer missing.
    """
    if not self.support_partial_insertion:
        raise TestNotApplicable(
            'versioned file scenario does not support partial insertion')
    source = self.get_knit_delta_source()
    entries = source.get_record_stream([self.get_simple_key('origin'),
        self.get_simple_key('merged')], 'unordered', False)
    files = self.get_versionedfiles()
    files.insert_record_stream(entries)
    missing_bases = files.get_missing_compression_parent_keys()
    self.assertEqual({self.get_simple_key('left')},
        set(missing_bases))
    # 'merged' is inserted (although a commit of a write group involving
    # this versionedfiles would fail).
    merged_key = self.get_simple_key('merged')
    # Note: get_parent_map returns a dict, whose .keys() is a view on
    # Python 3; materialise it with list() before comparing to a list.
    self.assertEqual(
        [merged_key], list(files.get_parent_map([merged_key]).keys()))
    # Add the full delta closure of the missing records
    missing_entries = source.get_record_stream(
        missing_bases, 'unordered', True)
    files.insert_record_stream(missing_entries)
    # Now 'merged' is fully inserted (and a commit would succeed).
    self.assertEqual([], list(files.get_missing_compression_parent_keys()))
    self.assertEqual(
        [merged_key], list(files.get_parent_map([merged_key]).keys()))
    files.check()
|
4009.3.2
by Andrew Bennetts
Add test_insert_record_stream_delta_missing_basis_can_be_added_later. |
2562 |
|
|
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
2563 |
def test_iter_lines_added_or_present_in_keys(self):
    # test that we get at least an equalset of the lines added by
    # versions in the store.
    # the ordering here is to make a tree so that dumb searches have
    # more changes to muck up.

    class InstrumentedProgress(progress.ProgressTask):
        """ProgressTask that records every update() it receives."""

        def __init__(self):
            progress.ProgressTask.__init__(self)
            self.updates = []

        def update(self, msg=None, current=None, total=None):
            self.updates.append((msg, current, total))

    files = self.get_versionedfiles()
    # a base to get included
    files.add_lines(self.get_simple_key('base'), (), ['base\n'])
    # an ancestor to be included on one side
    files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
    # an ancestor to be included on the other side
    files.add_lines(self.get_simple_key('rancestor'),
        self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
    # a child of rancestor with no eofile-nl
    files.add_lines(self.get_simple_key('child'),
        self.get_parents([self.get_simple_key('rancestor')]),
        ['base\n', 'child\n'])
    # a child of lancestor and base to join the two roots
    files.add_lines(self.get_simple_key('otherchild'),
        self.get_parents([self.get_simple_key('lancestor'),
            self.get_simple_key('base')]),
        ['base\n', 'lancestor\n', 'otherchild\n'])

    def iter_with_keys(keys, expected):
        # Count how often each line is returned for the given keys, and
        # check any progress updates emitted while iterating.
        counts = {}
        pb = InstrumentedProgress()
        for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=pb):
            counts[line] = counts.get(line, 0) + 1
        if pb.updates != []:
            self.assertEqual(expected, pb.updates)
        return counts

    lines = iter_with_keys(
        [self.get_simple_key('child'), self.get_simple_key('otherchild')],
        [('Walking content', 0, 2),
         ('Walking content', 1, 2),
         ('Walking content', 2, 2)])
    # we must see child and otherchild
    self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
    self.assertTrue(
        lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
    # we dont care if we got more than that.

    # test all lines
    lines = iter_with_keys(files.keys(),
        [('Walking content', 0, 5),
         ('Walking content', 1, 5),
         ('Walking content', 2, 5),
         ('Walking content', 3, 5),
         ('Walking content', 4, 5),
         ('Walking content', 5, 5)])
    # all lines must be seen at least once
    self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
    self.assertTrue(
        lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
    self.assertTrue(
        lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
    self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
    self.assertTrue(
        lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
|
2635 |
||
2636 |
def test_make_mpdiffs(self):
    """Round-trip every text through make_mpdiffs/add_mpdiffs."""
    from breezy import multiparent
    files = self.get_versionedfiles('source')
    # add texts that should trip the knit maximum delta chain threshold
    # as well as doing parallel chains of data in knits.
    # this is done by two chains of 25 insertions
    files.add_lines(self.get_simple_key('base'), [], ['line\n'])
    files.add_lines(self.get_simple_key('noeol'),
        self.get_parents([self.get_simple_key('base')]), ['line'])
    # detailed eol tests:
    # shared last line with parent no-eol
    files.add_lines(self.get_simple_key('noeolsecond'),
        self.get_parents([self.get_simple_key('noeol')]),
        ['line\n', 'line'])
    # differing last line with parent, both no-eol
    files.add_lines(self.get_simple_key('noeolnotshared'),
        self.get_parents([self.get_simple_key('noeolsecond')]),
        ['line\n', 'phone'])
    # add eol following a noneol parent, change content
    files.add_lines(self.get_simple_key('eol'),
        self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
    # add eol following a noneol parent, no change content
    files.add_lines(self.get_simple_key('eolline'),
        self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
    # noeol with no parents:
    files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
    # noeol preceeding its leftmost parent in the output:
    # this is done by making it a merge of two parents with no common
    # anestry: noeolbase and noeol with the
    # later-inserted parent the leftmost.
    files.add_lines(self.get_simple_key('eolbeforefirstparent'),
        self.get_parents([self.get_simple_key('noeolbase'),
            self.get_simple_key('noeol')]),
        ['line'])
    # two identical eol texts
    files.add_lines(self.get_simple_key('noeoldup'),
        self.get_parents([self.get_simple_key('noeol')]), ['line'])
    # Two parallel chains of 26 growing texts, long enough to exceed
    # the maximum delta chain length in knit-based implementations.
    for text_name in ('chain1-', 'chain2-'):
        next_parent = self.get_simple_key('base')
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version,
                self.get_parents([next_parent]), text)
            next_parent = new_version
    # Transfer every text as an mpdiff into a fresh store and check the
    # reconstructed fulltexts are identical.
    target = self.get_versionedfiles('target')
    for key in multiparent.topo_iter_keys(files, files.keys()):
        mpdiff = files.make_mpdiffs([key])[0]
        parents = files.get_parent_map([key])[key] or []
        target.add_mpdiffs(
            [(key, parents, files.get_sha1s([key])[key], mpdiff)])
        # Use the builtin next() (the Python 2-only .next() iterator
        # method does not exist on Python 3).
        self.assertEqualDiff(
            next(files.get_record_stream([key], 'unordered',
                True)).get_bytes_as('fulltext'),
            next(target.get_record_stream([key], 'unordered',
                True)).get_bytes_as('fulltext')
            )
|
|
2728 |
||
2729 |
def test_keys(self):
    # While use is discouraged, versions() is still needed by aspects of
    # bzr, so keys() must work for both empty and populated stores.
    files = self.get_versionedfiles()
    self.assertEqual(set(), set(files.keys()))
    key = ('foo',) if self.key_length == 1 else ('foo', 'bar')
    files.add_lines(key, (), [])
    self.assertEqual({key}, set(files.keys()))
|
3518.1.1
by Jelmer Vernooij
Add VirtualVersionedFiles class. |
2740 |
|
2741 |
||
2742 |
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        # Parent-map callback handed to VirtualVersionedFiles: answer
        # only for the keys we actually know about.
        return dict((k, self._parent_map[k])
                    for k in keys if k in self._parent_map)

    def setUp(self):
        super(VirtualVersionedFilesTests, self).setUp()
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        # Virtual stores are read-only: mutation must be refused.
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError,
                          self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEqual({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        expected = {("key",): osutils.sha_strings(self._lines["key"])}
        self.assertEqual(expected, self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        # Unknown keys ("L") are simply absent from the result.
        self._parent_map = {"G": ("A", "B")}
        self.assertEqual({("G",): (("A",), ("B",))},
                         self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        rec = next(stream)
        self.assertEqual("chunked", rec.storage_kind)
        self.assertEqual("FOOBAR", rec.get_bytes_as("fulltext"))
        self.assertEqual(["FOO", "BAR"], rec.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        stream = self.texts.get_record_stream([("A",)], "unordered", True)
        rec = next(stream)
        self.assertEqual("absent", rec.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEqual(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                         sorted(list(it)))
2807 |
||
|
3871.4.1
by John Arbash Meinel
Add a VFDecorator that can yield records in a specified order |
2808 |
|
2809 |
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport): |
|
2810 |
||
2811 |
def get_ordering_vf(self, key_priority):
    """Build a linear A->B->C->D branch and wrap its inventories store
    in an OrderingVersionedFilesDecorator using key_priority.
    """
    builder = self.make_branch_builder('test')
    builder.start_series()
    builder.build_snapshot('A', None, [
        ('add', ('', 'TREE_ROOT', 'directory', None))])
    for revid, parent in (('B', 'A'), ('C', 'B'), ('D', 'C')):
        builder.build_snapshot(revid, [parent], [])
    builder.finish_series()
    branch = builder.get_branch()
    branch.lock_read()
    self.addCleanup(branch.unlock)
    inventories = branch.repository.inventories
    return versionedfile.OrderingVersionedFilesDecorator(
        inventories, key_priority)
|
2825 |
||
2826 |
def test_get_empty(self):
    # A freshly built decorator has recorded no calls yet.
    vf = self.get_ordering_vf({})
    self.assertEqual([], vf.calls)
|
2829 |
||
2830 |
def test_get_record_stream_topological(self):
    vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
    request_keys = [('B',), ('C',), ('D',), ('A',)]
    stream = vf.get_record_stream(request_keys, 'topological', False)
    keys = [record.key for record in stream]
    # Topological ordering takes precedence over the priority map.
    self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
    # The underlying request must have been recorded verbatim.
    self.assertEqual(
        [('get_record_stream', request_keys, 'topological', False)],
        vf.calls)
|
2840 |
||
2841 |
def test_get_record_stream_ordered(self):
    vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
    request_keys = [('B',), ('C',), ('D',), ('A',)]
    stream = vf.get_record_stream(request_keys, 'unordered', False)
    keys = [record.key for record in stream]
    # Unordered requests come back sorted by ascending priority value.
    self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
    # The underlying request must have been recorded verbatim.
    self.assertEqual(
        [('get_record_stream', request_keys, 'unordered', False)],
        vf.calls)
|
2851 |
||
2852 |
def test_get_record_stream_implicit_order(self):
    vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
    request_keys = [('B',), ('C',), ('D',), ('A',)]
    stream = vf.get_record_stream(request_keys, 'unordered', False)
    keys = [record.key for record in stream]
    # A and C are not in the map, so they get sorted to the front. A comes
    # before C alphabetically, so it comes back first
    self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
    # The underlying request must have been recorded verbatim.
    self.assertEqual(
        [('get_record_stream', request_keys, 'unordered', False)],
        vf.calls)