# Copyright (C) 2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Direct tests of the btree serializer extension"""

import binascii
import bisect

from .. import tests
from ..sixish import (
    int2byte,
    )

from .test_btree_index import compiled_btreeparser_feature


class TestBtreeSerializer(tests.TestCase):

    _test_needs_features = [compiled_btreeparser_feature]

    @property
    def module(self):
        return compiled_btreeparser_feature.module


class TestHexAndUnhex(TestBtreeSerializer):

    def assertHexlify(self, as_binary):
        self.assertEqual(binascii.hexlify(as_binary),
                         self.module._py_hexlify(as_binary))

    def assertUnhexlify(self, as_hex):
        ba_unhex = binascii.unhexlify(as_hex)
        mod_unhex = self.module._py_unhexlify(as_hex)
        if ba_unhex != mod_unhex:
            if mod_unhex is None:
                mod_hex = '<None>'
            else:
                mod_hex = binascii.hexlify(mod_unhex)
            self.fail('_py_unhexlify returned a different answer'
                      ' from binascii:\n %r\n != %r'
                      % (binascii.hexlify(ba_unhex), mod_hex))

    def assertFailUnhexlify(self, as_hex):
        self.assertIs(None, self.module._py_unhexlify(as_hex))

    def test_to_hex(self):
        raw_bytes = b''.join(map(int2byte, range(256)))
        for i in range(0, 240, 20):
            self.assertHexlify(raw_bytes[i:i + 20])
        self.assertHexlify(raw_bytes[240:] + raw_bytes[0:4])

    def test_from_hex(self):
        self.assertUnhexlify(b'0123456789abcdef0123456789abcdef01234567')
        self.assertUnhexlify(b'123456789abcdef0123456789abcdef012345678')
        self.assertUnhexlify(b'0123456789ABCDEF0123456789ABCDEF01234567')
        self.assertUnhexlify(b'123456789ABCDEF0123456789ABCDEF012345678')
        hex_chars = binascii.hexlify(b''.join(map(int2byte, range(256))))
        for i in range(0, 480, 40):
            self.assertUnhexlify(hex_chars[i:i + 40])
        self.assertUnhexlify(hex_chars[480:] + hex_chars[0:8])

    def test_from_invalid_hex(self):
        self.assertFailUnhexlify(b'123456789012345678901234567890123456789X')
        self.assertFailUnhexlify(b'12345678901234567890123456789012345678X9')

    def test_bad_argument(self):
        self.assertRaises(ValueError, self.module._py_unhexlify, u'1a')
        self.assertRaises(ValueError, self.module._py_unhexlify, b'1b')


_hex_form = b'123456789012345678901234567890abcdefabcd'
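# 40 hex characters, i.e. the text form of a 20-byte SHA-1 digest; reused as a
# stand-in digest by the conversion tests and the leaf fixtures below.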


class Test_KeyToSha1(TestBtreeSerializer):

    def assertKeyToSha1(self, expected, key):
        if expected is None:
            expected_bin = None
        else:
            expected_bin = binascii.unhexlify(expected)
        actual_sha1 = self.module._py_key_to_sha1(key)
        if expected_bin != actual_sha1:
            actual_hex_sha1 = None
            if actual_sha1 is not None:
                actual_hex_sha1 = binascii.hexlify(actual_sha1)
            self.fail('_key_to_sha1 returned:\n %s\n != %s'
                      % (actual_hex_sha1, expected))

    def test_simple(self):
        self.assertKeyToSha1(_hex_form, (b'sha1:' + _hex_form,))

    def test_invalid_not_tuple(self):
        self.assertKeyToSha1(None, _hex_form)
        self.assertKeyToSha1(None, b'sha1:' + _hex_form)

    def test_invalid_empty(self):
        self.assertKeyToSha1(None, ())

    def test_invalid_not_string(self):
        self.assertKeyToSha1(None, (None,))
        self.assertKeyToSha1(None, (list(_hex_form),))

    def test_invalid_not_sha1(self):
        self.assertKeyToSha1(None, (_hex_form,))
        self.assertKeyToSha1(None, (b'sha2:' + _hex_form,))

    def test_invalid_not_hex(self):
        self.assertKeyToSha1(
            None, (b'sha1:abcdefghijklmnopqrstuvwxyz12345678901234',))


class Test_Sha1ToKey(TestBtreeSerializer):

    def assertSha1ToKey(self, hex_sha1):
        bin_sha1 = binascii.unhexlify(hex_sha1)
        key = self.module._py_sha1_to_key(bin_sha1)
        self.assertEqual((b'sha1:' + hex_sha1,), key)

    def test_simple(self):
        self.assertSha1ToKey(_hex_form)


_one_key_content = b"""type=leaf
sha1:123456789012345678901234567890abcdefabcd\x00\x001 2 3 4
"""
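# As test_one_key_leaf below shows, each line parses as key, NUL, reference
# lists (empty here), NUL, value; the empty reference field is why two
# consecutive NULs appear before b'1 2 3 4'.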

_large_offsets = b"""type=leaf
sha1:123456789012345678901234567890abcdefabcd\x00\x0012345678901 1234567890 0 1
sha1:abcd123456789012345678901234567890abcdef\x00\x002147483648 2147483647 0 1
sha1:abcdefabcd123456789012345678901234567890\x00\x004294967296 4294967295 4294967294 1
"""
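# These values straddle 2**31 (2147483648) and 2**32 (4294967296), so the
# parser gets exercised with offsets that do not fit in a 32-bit integer.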

_multi_key_content = b"""type=leaf
sha1:c80c881d4a26984ddce795f6f71817c9cf4480e7\x00\x000 0 0 0
sha1:c86f7e437faa5a7fce15d1ddcb9eaeaea377667b\x00\x001 1 1 1
sha1:c8e240de74fb1ed08fa08d38063f6a6a91462a81\x00\x002 2 2 2
sha1:cda39a3ee5e6b4b0d3255bfef95601890afd8070\x00\x003 3 3 3
sha1:cdf51e37c269aa94d38f93e537bf6e2020b21406\x00\x004 4 4 4
sha1:ce0c9035898dd52fc65c41454cec9c4d2611bfb3\x00\x005 5 5 5
sha1:ce93b4e3c464ffd51732fbd6ded717e9efda28aa\x00\x006 6 6 6
sha1:cf7a9e24777ec23212c54d7a350bc5bea5477fdb\x00\x007 7 7 7
"""
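# All of these digests begin with 0xc8..0xcf, so they share their leading five
# bits; test_common_shift below depends on that.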

_multi_key_same_offset = b"""type=leaf
sha1:080c881d4a26984ddce795f6f71817c9cf4480e7\x00\x000 0 0 0
sha1:c86f7e437faa5a7fce15d1ddcb9eaeaea377667b\x00\x001 1 1 1
sha1:cd0c9035898dd52fc65c41454cec9c4d2611bfb3\x00\x002 2 2 2
sha1:cda39a3ee5e6b4b0d3255bfef95601890afd8070\x00\x003 3 3 3
sha1:cde240de74fb1ed08fa08d38063f6a6a91462a81\x00\x004 4 4 4
sha1:cdf51e37c269aa94d38f93e537bf6e2020b21406\x00\x005 5 5 5
sha1:ce7a9e24777ec23212c54d7a350bc5bea5477fdb\x00\x006 6 6 6
sha1:ce93b4e3c464ffd51732fbd6ded717e9efda28aa\x00\x007 7 7 7
"""
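# The first digest begins with 0x08 while the rest begin with 0xc8..0xce, so
# there is no common prefix, but four keys share the leading byte 0xcd and
# therefore map to the same offset slot.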

_common_32_bits = b"""type=leaf
sha1:123456784a26984ddce795f6f71817c9cf4480e7\x00\x000 0 0 0
sha1:1234567874fb1ed08fa08d38063f6a6a91462a81\x00\x001 1 1 1
sha1:12345678777ec23212c54d7a350bc5bea5477fdb\x00\x002 2 2 2
sha1:123456787faa5a7fce15d1ddcb9eaeaea377667b\x00\x003 3 3 3
sha1:12345678898dd52fc65c41454cec9c4d2611bfb3\x00\x004 4 4 4
sha1:12345678c269aa94d38f93e537bf6e2020b21406\x00\x005 5 5 5
sha1:12345678c464ffd51732fbd6ded717e9efda28aa\x00\x006 6 6 6
sha1:12345678e5e6b4b0d3255bfef95601890afd8070\x00\x007 7 7 7
"""
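# Every digest here starts with the same 32 bits (0x12345678), which is what
# drives common_shift down to 0 in test_all_common_prefix.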


class TestGCCKHSHA1LeafNode(TestBtreeSerializer):

    def assertInvalid(self, data):
        """Ensure that we get a proper error when trying to parse invalid bytes.

        (Mostly this is testing that bad input doesn't cause us to segfault.)
        """
        self.assertRaises(
            (ValueError, TypeError), self.module._parse_into_chk, data, 1, 0)

    def test_non_bytes(self):
        self.assertInvalid(u'type=leaf\n')

    def test_not_leaf(self):
        self.assertInvalid(b'type=internal\n')

    def test_empty_leaf(self):
        leaf = self.module._parse_into_chk(b'type=leaf\n', 1, 0)
        self.assertEqual(0, len(leaf))
        self.assertEqual([], leaf.all_items())
        self.assertEqual([], leaf.all_keys())
        # It should allow any key to be queried
        self.assertFalse(('key',) in leaf)

    def test_one_key_leaf(self):
        leaf = self.module._parse_into_chk(_one_key_content, 1, 0)
        self.assertEqual(1, len(leaf))
        sha_key = (b'sha1:' + _hex_form,)
        self.assertEqual([sha_key], leaf.all_keys())
        self.assertEqual([(sha_key, (b'1 2 3 4', ()))], leaf.all_items())
        self.assertTrue(sha_key in leaf)

    def test_large_offsets(self):
        leaf = self.module._parse_into_chk(_large_offsets, 1, 0)
        self.assertEqual([b'12345678901 1234567890 0 1',
                          b'2147483648 2147483647 0 1',
                          b'4294967296 4294967295 4294967294 1',
                          ], [x[1][0] for x in leaf.all_items()])

    def test_many_key_leaf(self):
        leaf = self.module._parse_into_chk(_multi_key_content, 1, 0)
        self.assertEqual(8, len(leaf))
        all_keys = leaf.all_keys()
        self.assertEqual(8, len(all_keys))
        for idx, key in enumerate(all_keys):
            self.assertEqual(b'%d' % idx, leaf[key][0].split()[0])

    def test_common_shift(self):
        # The keys were deliberately chosen so that the first 5 bits all
        # overlap; it also happens that a later bit overlaps.
        # Note that by 'overlap' we mean that a given bit is either on in all
        # keys, or off in all keys.
        leaf = self.module._parse_into_chk(_multi_key_content, 1, 0)
        self.assertEqual(19, leaf.common_shift)
        # The interesting byte for each key (defined as the 8 bits that come
        # after the common prefix) is:
        lst = [1, 13, 28, 180, 190, 193, 210, 239]
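        # For instance, the first key's leading 32 bits are 0xc80c881d and
        # (0xc80c881d >> 19) & 0xFF == 1, the first entry above (an
        # illustration derived from the data, not from the extension's docs).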
        offsets = leaf._get_offsets()
        self.assertEqual([bisect.bisect_left(lst, x) for x in range(0, 257)],
                         offsets)
        for idx, val in enumerate(lst):
            self.assertEqual(idx, offsets[val])
        for idx, key in enumerate(leaf.all_keys()):
            self.assertEqual(b'%d' % idx, leaf[key][0].split()[0])

    def test_multi_key_same_offset(self):
        # there is no common prefix, though there are some common bits
        leaf = self.module._parse_into_chk(_multi_key_same_offset, 1, 0)
        self.assertEqual(24, leaf.common_shift)
        offsets = leaf._get_offsets()
        # The interesting byte is just the first 8-bits of the key
        lst = [8, 200, 205, 205, 205, 205, 206, 206]
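        # With a shift of 24 the interesting byte is simply the leading byte
        # of each digest: 0x08 -> 8, 0xc8 -> 200, 0xcd -> 205, 0xce -> 206.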
        self.assertEqual([bisect.bisect_left(lst, x) for x in range(0, 257)],
                         offsets)
        for val in lst:
            self.assertEqual(lst.index(val), offsets[val])
        for idx, key in enumerate(leaf.all_keys()):
            self.assertEqual(b'%d' % idx, leaf[key][0].split()[0])

    def test_all_common_prefix(self):
        # The first 32 bits of all hashes are the same. This is going to be
        # pretty much impossible, but I don't want to fail because of this
        leaf = self.module._parse_into_chk(_common_32_bits, 1, 0)
        self.assertEqual(0, leaf.common_shift)
        lst = [0x78] * 8
        offsets = leaf._get_offsets()
        self.assertEqual([bisect.bisect_left(lst, x) for x in range(0, 257)],
                         offsets)
        for val in lst:
            self.assertEqual(lst.index(val), offsets[val])
        for idx, key in enumerate(leaf.all_keys()):
            self.assertEqual(b'%d' % idx, leaf[key][0].split()[0])

    def test_many_entries(self):
        # Again, this is almost impossible, but we should still work.
        # It would be hard to fit more than 120 entries in a 4k page, much
        # less more than 256 of them. But hey, weird stuff happens sometimes.
        lines = [b'type=leaf\n']
        for i in range(500):
            key_str = b'sha1:%04x%s' % (i, _hex_form[:36])
            lines.append(b'%s\0\0%d %d %d %d\n' % (key_str, i, i, i, i))
        data = b''.join(lines)
        leaf = self.module._parse_into_chk(data, 1, 0)
        self.assertEqual(24 - 7, leaf.common_shift)
        offsets = leaf._get_offsets()
        # These are the interesting bits for each entry
        lst = [x // 2 for x in range(500)]
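        # (Each key is b'sha1:%04x' plus the same 36 hex digits, so the
        # leading 32 bits are (i << 16) | 0x1234; shifting right by the
        # common_shift of 17 leaves i >> 1, hence the i // 2 above.)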
        expected_offsets = [x * 2 for x in range(128)] + [255] * 129
        self.assertEqual(expected_offsets, offsets)
        # We truncate because offsets is an unsigned char. So the bisection
        # will just say 'greater than the last one' for all the rest
        lst = lst[:255]
        self.assertEqual([bisect.bisect_left(lst, x) for x in range(0, 257)],
                         offsets)
        for val in lst:
            self.assertEqual(lst.index(val), offsets[val])
        for idx, key in enumerate(leaf.all_keys()):
            self.assertEqual(b'%d' % idx, leaf[key][0].split()[0])

    def test__sizeof__(self):
        # We can't use the exact numbers because of platform variations, etc.
        # But what we really care about is that it does get bigger with more
        # content.
        leaf0 = self.module._parse_into_chk(b'type=leaf\n', 1, 0)
        leaf1 = self.module._parse_into_chk(_one_key_content, 1, 0)
        leafN = self.module._parse_into_chk(_multi_key_content, 1, 0)
        sizeof_1 = leaf1.__sizeof__() - leaf0.__sizeof__()
        self.assertTrue(sizeof_1 > 0)
        sizeof_N = leafN.__sizeof__() - leaf0.__sizeof__()
        self.assertEqual(sizeof_1 * len(leafN), sizeof_N)