/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_knit.py

  • Committer: Robert Collins
  • Date: 2010-05-06 23:41:35 UTC
  • mto: This revision was merged to the branch mainline in revision 5223.
  • Revision ID: robertc@robertcollins.net-20100506234135-yivbzczw1sejxnxc
Lock methods on ``Tree``, ``Branch`` and ``Repository`` are now
expected to return an object which can be used to unlock them. This reduces
duplicate code when using cleanups. The previous 'tokens' returned by
``Branch.lock_write`` and ``Repository.lock_write`` are now attributes
on the result of the lock_write. ``repository.RepositoryWriteLockResult``
and ``branch.BranchWriteLockResult`` document this. (Robert Collins)

``log._get_info_for_log_files`` now takes an add_cleanup callable.
(Robert Collins)

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2006-2010 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
16
16
 
17
17
"""Tests for Knit data structure"""
18
18
 
 
19
from cStringIO import StringIO
 
20
import difflib
19
21
import gzip
20
 
from io import BytesIO
21
 
from patiencediff import PatienceSequenceMatcher
22
22
import sys
23
23
 
24
 
from .. import (
 
24
from bzrlib import (
25
25
    errors,
 
26
    generate_ids,
 
27
    knit,
26
28
    multiparent,
27
29
    osutils,
 
30
    pack,
28
31
    tests,
29
 
    transport,
30
 
    )
31
 
from ..bzr import (
32
 
    knit,
33
 
    pack,
34
 
    )
35
 
from ..bzr.index import *
36
 
from ..bzr.knit import (
 
32
    )
 
33
from bzrlib.errors import (
 
34
    RevisionAlreadyPresent,
 
35
    KnitHeaderError,
 
36
    RevisionNotPresent,
 
37
    NoSuchFile,
 
38
    )
 
39
from bzrlib.index import *
 
40
from bzrlib.knit import (
37
41
    AnnotatedKnitContent,
38
42
    KnitContent,
39
 
    KnitCorrupt,
40
 
    KnitDataStreamIncompatible,
41
 
    KnitDataStreamUnknown,
42
 
    KnitHeaderError,
43
 
    KnitIndexUnknownMethod,
 
43
    KnitSequenceMatcher,
44
44
    KnitVersionedFiles,
45
45
    PlainKnitContent,
46
46
    _VFContentMapGenerator,
 
47
    _DirectPackAccess,
47
48
    _KndxIndex,
48
49
    _KnitGraphIndex,
49
50
    _KnitKeyAccess,
50
51
    make_file_factory,
51
52
    )
52
 
from ..bzr import (
53
 
    knitpack_repo,
54
 
    pack_repo,
55
 
    )
56
 
from . import (
 
53
from bzrlib.repofmt import pack_repo
 
54
from bzrlib.tests import (
 
55
    Feature,
 
56
    KnownFailure,
57
57
    TestCase,
58
58
    TestCaseWithMemoryTransport,
59
59
    TestCaseWithTransport,
60
60
    TestNotApplicable,
61
61
    )
62
 
from ..bzr.versionedfile import (
 
62
from bzrlib.transport import get_transport
 
63
from bzrlib.transport.memory import MemoryTransport
 
64
from bzrlib.tuned_gzip import GzipFile
 
65
from bzrlib.versionedfile import (
63
66
    AbsentContentFactory,
64
67
    ConstantMapper,
65
68
    network_bytes_to_kind_and_offset,
66
69
    RecordingVersionedFilesDecorator,
67
70
    )
68
 
from . import (
69
 
    features,
70
 
    )
71
 
 
72
 
 
73
 
compiled_knit_feature = features.ModuleAvailableFeature(
74
 
    'breezy.bzr._knit_load_data_pyx')
75
 
 
76
 
 
77
 
class ErrorTests(TestCase):
78
 
 
79
 
    def test_knit_data_stream_incompatible(self):
80
 
        error = KnitDataStreamIncompatible(
81
 
            'stream format', 'target format')
82
 
        self.assertEqual('Cannot insert knit data stream of format '
83
 
                         '"stream format" into knit of format '
84
 
                         '"target format".', str(error))
85
 
 
86
 
    def test_knit_data_stream_unknown(self):
87
 
        error = KnitDataStreamUnknown(
88
 
            'stream format')
89
 
        self.assertEqual('Cannot parse knit data stream of format '
90
 
                         '"stream format".', str(error))
91
 
 
92
 
    def test_knit_header_error(self):
93
 
        error = KnitHeaderError('line foo\n', 'path/to/file')
94
 
        self.assertEqual("Knit header error: 'line foo\\n' unexpected"
95
 
                         " for file \"path/to/file\".", str(error))
96
 
 
97
 
    def test_knit_index_unknown_method(self):
98
 
        error = KnitIndexUnknownMethod('http://host/foo.kndx',
99
 
                                       ['bad', 'no-eol'])
100
 
        self.assertEqual("Knit index http://host/foo.kndx does not have a"
101
 
                         " known method in options: ['bad', 'no-eol']",
102
 
                         str(error))
 
71
 
 
72
 
 
73
compiled_knit_feature = tests.ModuleAvailableFeature(
 
74
                            'bzrlib._knit_load_data_pyx')
103
75
 
104
76
 
105
77
class KnitContentTestsMixin(object):
111
83
        content = self._make_content([])
112
84
        self.assertEqual(content.text(), [])
113
85
 
114
 
        content = self._make_content(
115
 
            [(b"origin1", b"text1"), (b"origin2", b"text2")])
116
 
        self.assertEqual(content.text(), [b"text1", b"text2"])
 
86
        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
 
87
        self.assertEqual(content.text(), ["text1", "text2"])
117
88
 
118
89
    def test_copy(self):
119
 
        content = self._make_content(
120
 
            [(b"origin1", b"text1"), (b"origin2", b"text2")])
 
90
        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
121
91
        copy = content.copy()
122
92
        self.assertIsInstance(copy, content.__class__)
123
93
        self.assertEqual(copy.annotate(), content.annotate())
126
96
        """Assert that the derived matching blocks match real output"""
127
97
        source_lines = source.splitlines(True)
128
98
        target_lines = target.splitlines(True)
129
 
 
130
99
        def nl(line):
131
100
            if noeol and not line.endswith('\n'):
132
101
                return line + '\n'
133
102
            else:
134
103
                return line
135
 
        source_content = self._make_content(
136
 
            [(None, nl(l)) for l in source_lines])
137
 
        target_content = self._make_content(
138
 
            [(None, nl(l)) for l in target_lines])
 
104
        source_content = self._make_content([(None, nl(l)) for l in source_lines])
 
105
        target_content = self._make_content([(None, nl(l)) for l in target_lines])
139
106
        line_delta = source_content.line_delta(target_content)
140
107
        delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
141
 
                                                              source_lines, target_lines))
142
 
        matcher = PatienceSequenceMatcher(None, source_lines, target_lines)
143
 
        matcher_blocks = list(matcher.get_matching_blocks())
 
108
            source_lines, target_lines))
 
109
        matcher = KnitSequenceMatcher(None, source_lines, target_lines)
 
110
        matcher_blocks = list(list(matcher.get_matching_blocks()))
144
111
        self.assertEqual(matcher_blocks, delta_blocks)
145
112
 
146
113
    def test_get_line_delta_blocks(self):
225
192
        content = self._make_content([])
226
193
        self.assertEqual(content.annotate(), [])
227
194
 
228
 
        content = self._make_content(
229
 
            [("origin1", "text1"), ("origin2", "text2")])
 
195
        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
230
196
        self.assertEqual(content.annotate(),
231
 
                         [("bogus", "text1"), ("bogus", "text2")])
 
197
            [("bogus", "text1"), ("bogus", "text2")])
232
198
 
233
199
    def test_line_delta(self):
234
200
        content1 = self._make_content([("", "a"), ("", "b")])
235
201
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
236
202
        self.assertEqual(content1.line_delta(content2),
237
 
                         [(1, 2, 2, ["a", "c"])])
 
203
            [(1, 2, 2, ["a", "c"])])
238
204
 
239
205
    def test_line_delta_iter(self):
240
206
        content1 = self._make_content([("", "a"), ("", "b")])
241
207
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
242
208
        it = content1.line_delta_iter(content2)
243
 
        self.assertEqual(next(it), (1, 2, 2, ["a", "c"]))
244
 
        self.assertRaises(StopIteration, next, it)
 
209
        self.assertEqual(it.next(), (1, 2, 2, ["a", "c"]))
 
210
        self.assertRaises(StopIteration, it.next)
245
211
 
246
212
 
247
213
class TestAnnotatedKnitContent(TestCase, KnitContentTestsMixin):
253
219
        content = self._make_content([])
254
220
        self.assertEqual(content.annotate(), [])
255
221
 
256
 
        content = self._make_content(
257
 
            [(b"origin1", b"text1"), (b"origin2", b"text2")])
 
222
        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
258
223
        self.assertEqual(content.annotate(),
259
 
                         [(b"origin1", b"text1"), (b"origin2", b"text2")])
 
224
            [("origin1", "text1"), ("origin2", "text2")])
260
225
 
261
226
    def test_line_delta(self):
262
227
        content1 = self._make_content([("", "a"), ("", "b")])
263
228
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
264
229
        self.assertEqual(content1.line_delta(content2),
265
 
                         [(1, 2, 2, [("", "a"), ("", "c")])])
 
230
            [(1, 2, 2, [("", "a"), ("", "c")])])
266
231
 
267
232
    def test_line_delta_iter(self):
268
233
        content1 = self._make_content([("", "a"), ("", "b")])
269
234
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
270
235
        it = content1.line_delta_iter(content2)
271
 
        self.assertEqual(next(it), (1, 2, 2, [("", "a"), ("", "c")]))
272
 
        self.assertRaises(StopIteration, next, it)
 
236
        self.assertEqual(it.next(), (1, 2, 2, [("", "a"), ("", "c")]))
 
237
        self.assertRaises(StopIteration, it.next)
273
238
 
274
239
 
275
240
class MockTransport(object):
282
247
 
283
248
    def get(self, filename):
284
249
        if self.file_lines is None:
285
 
            raise errors.NoSuchFile(filename)
 
250
            raise NoSuchFile(filename)
286
251
        else:
287
 
            return BytesIO(b"\n".join(self.file_lines))
 
252
            return StringIO("\n".join(self.file_lines))
288
253
 
289
254
    def readv(self, relpath, offsets):
290
255
        fp = self.get(relpath)
320
285
    """Tests for getting and putting knit records."""
321
286
 
322
287
    def test_add_raw_records(self):
323
 
        """add_raw_records adds records retrievable later."""
324
 
        access = self.get_access()
325
 
        memos = access.add_raw_records([(b'key', 10)], [b'1234567890'])
326
 
        self.assertEqual([b'1234567890'], list(access.get_raw_records(memos)))
327
 
 
328
 
    def test_add_raw_record(self):
329
 
        """add_raw_record adds records retrievable later."""
330
 
        access = self.get_access()
331
 
        memos = access.add_raw_record(b'key', 10, [b'1234567890'])
332
 
        self.assertEqual([b'1234567890'], list(access.get_raw_records([memos])))
 
288
        """Add_raw_records adds records retrievable later."""
 
289
        access = self.get_access()
 
290
        memos = access.add_raw_records([('key', 10)], '1234567890')
 
291
        self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
333
292
 
334
293
    def test_add_several_raw_records(self):
335
294
        """add_raw_records with many records and read some back."""
336
295
        access = self.get_access()
337
 
        memos = access.add_raw_records([(b'key', 10), (b'key2', 2), (b'key3', 5)],
338
 
                                       [b'12345678901234567'])
339
 
        self.assertEqual([b'1234567890', b'12', b'34567'],
340
 
                         list(access.get_raw_records(memos)))
341
 
        self.assertEqual([b'1234567890'],
342
 
                         list(access.get_raw_records(memos[0:1])))
343
 
        self.assertEqual([b'12'],
344
 
                         list(access.get_raw_records(memos[1:2])))
345
 
        self.assertEqual([b'34567'],
346
 
                         list(access.get_raw_records(memos[2:3])))
347
 
        self.assertEqual([b'1234567890', b'34567'],
348
 
                         list(access.get_raw_records(memos[0:1] + memos[2:3])))
 
296
        memos = access.add_raw_records([('key', 10), ('key2', 2), ('key3', 5)],
 
297
            '12345678901234567')
 
298
        self.assertEqual(['1234567890', '12', '34567'],
 
299
            list(access.get_raw_records(memos)))
 
300
        self.assertEqual(['1234567890'],
 
301
            list(access.get_raw_records(memos[0:1])))
 
302
        self.assertEqual(['12'],
 
303
            list(access.get_raw_records(memos[1:2])))
 
304
        self.assertEqual(['34567'],
 
305
            list(access.get_raw_records(memos[2:3])))
 
306
        self.assertEqual(['1234567890', '34567'],
 
307
            list(access.get_raw_records(memos[0:1] + memos[2:3])))
349
308
 
350
309
 
351
310
class TestKnitKnitAccess(TestCaseWithMemoryTransport, KnitRecordAccessTestsMixin):
370
329
 
371
330
    def _get_access(self, packname='packfile', index='FOO'):
372
331
        transport = self.get_transport()
373
 
 
374
332
        def write_data(bytes):
375
333
            transport.append_bytes(packname, bytes)
376
334
        writer = pack.ContainerWriter(write_data)
377
335
        writer.begin()
378
 
        access = pack_repo._DirectPackAccess({})
 
336
        access = _DirectPackAccess({})
379
337
        access.set_writer(writer, index, (transport, packname))
380
338
        return access, writer
381
339
 
383
341
        """Create a pack file with 2 records."""
384
342
        access, writer = self._get_access(packname='packname', index='foo')
385
343
        memos = []
386
 
        memos.extend(access.add_raw_records([(b'key1', 10)], [b'1234567890']))
387
 
        memos.extend(access.add_raw_records([(b'key2', 5)], [b'12345']))
 
344
        memos.extend(access.add_raw_records([('key1', 10)], '1234567890'))
 
345
        memos.extend(access.add_raw_records([('key2', 5)], '12345'))
388
346
        writer.end()
389
347
        return memos
390
348
 
391
 
    def test_pack_collection_pack_retries(self):
392
 
        """An explicit pack of a pack collection succeeds even when a
393
 
        concurrent pack happens.
394
 
        """
395
 
        builder = self.make_branch_builder('.')
396
 
        builder.start_series()
397
 
        builder.build_snapshot(None, [
398
 
            ('add', ('', b'root-id', 'directory', None)),
399
 
            ('add', ('file', b'file-id', 'file', b'content\nrev 1\n')),
400
 
            ], revision_id=b'rev-1')
401
 
        builder.build_snapshot([b'rev-1'], [
402
 
            ('modify', ('file', b'content\nrev 2\n')),
403
 
            ], revision_id=b'rev-2')
404
 
        builder.build_snapshot([b'rev-2'], [
405
 
            ('modify', ('file', b'content\nrev 3\n')),
406
 
            ], revision_id=b'rev-3')
407
 
        self.addCleanup(builder.finish_series)
408
 
        b = builder.get_branch()
409
 
        self.addCleanup(b.lock_write().unlock)
410
 
        repo = b.repository
411
 
        collection = repo._pack_collection
412
 
        # Concurrently repack the repo.
413
 
        reopened_repo = repo.controldir.open_repository()
414
 
        reopened_repo.pack()
415
 
        # Pack the new pack.
416
 
        collection.pack()
417
 
 
418
349
    def make_vf_for_retrying(self):
419
350
        """Create 3 packs and a reload function.
420
351
 
427
358
        """
428
359
        builder = self.make_branch_builder('.', format="1.9")
429
360
        builder.start_series()
430
 
        builder.build_snapshot(None, [
431
 
            ('add', ('', b'root-id', 'directory', None)),
432
 
            ('add', ('file', b'file-id', 'file', b'content\nrev 1\n')),
433
 
            ], revision_id=b'rev-1')
434
 
        builder.build_snapshot([b'rev-1'], [
435
 
            ('modify', ('file', b'content\nrev 2\n')),
436
 
            ], revision_id=b'rev-2')
437
 
        builder.build_snapshot([b'rev-2'], [
438
 
            ('modify', ('file', b'content\nrev 3\n')),
439
 
            ], revision_id=b'rev-3')
 
361
        builder.build_snapshot('rev-1', None, [
 
362
            ('add', ('', 'root-id', 'directory', None)),
 
363
            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
 
364
            ])
 
365
        builder.build_snapshot('rev-2', ['rev-1'], [
 
366
            ('modify', ('file-id', 'content\nrev 2\n')),
 
367
            ])
 
368
        builder.build_snapshot('rev-3', ['rev-2'], [
 
369
            ('modify', ('file-id', 'content\nrev 3\n')),
 
370
            ])
440
371
        builder.finish_series()
441
372
        b = builder.get_branch()
442
373
        b.lock_write()
447
378
        collection = repo._pack_collection
448
379
        collection.ensure_loaded()
449
380
        orig_packs = collection.packs
450
 
        packer = knitpack_repo.KnitPacker(collection, orig_packs, '.testpack')
 
381
        packer = pack_repo.Packer(collection, orig_packs, '.testpack')
451
382
        new_pack = packer.pack()
452
383
        # forget about the new pack
453
384
        collection.reset()
457
388
        new_index = new_pack.revision_index
458
389
        access_tuple = new_pack.access_tuple()
459
390
        reload_counter = [0, 0, 0]
460
 
 
461
391
        def reload():
462
392
            reload_counter[0] += 1
463
393
            if reload_counter[1] > 0:
480
410
 
481
411
    def make_reload_func(self, return_val=True):
482
412
        reload_called = [0]
483
 
 
484
413
        def reload():
485
414
            reload_called[0] += 1
486
415
            return return_val
491
420
        # populated
492
421
        try:
493
422
            raise _TestException('foobar')
494
 
        except _TestException as e:
 
423
        except _TestException, e:
495
424
            retry_exc = errors.RetryWithNewPacks(None, reload_occurred=False,
496
425
                                                 exc_info=sys.exc_info())
497
 
        # GZ 2010-08-10: Cycle with exc_info affects 3 tests
498
426
        return retry_exc
499
427
 
500
428
    def test_read_from_several_packs(self):
501
429
        access, writer = self._get_access()
502
430
        memos = []
503
 
        memos.extend(access.add_raw_records([(b'key', 10)], [b'1234567890']))
 
431
        memos.extend(access.add_raw_records([('key', 10)], '1234567890'))
504
432
        writer.end()
505
433
        access, writer = self._get_access('pack2', 'FOOBAR')
506
 
        memos.extend(access.add_raw_records([(b'key', 5)], [b'12345']))
 
434
        memos.extend(access.add_raw_records([('key', 5)], '12345'))
507
435
        writer.end()
508
436
        access, writer = self._get_access('pack3', 'BAZ')
509
 
        memos.extend(access.add_raw_records([(b'key', 5)], [b'alpha']))
 
437
        memos.extend(access.add_raw_records([('key', 5)], 'alpha'))
510
438
        writer.end()
511
439
        transport = self.get_transport()
512
 
        access = pack_repo._DirectPackAccess({"FOO": (transport, 'packfile'),
513
 
                                              "FOOBAR": (transport, 'pack2'),
514
 
                                              "BAZ": (transport, 'pack3')})
515
 
        self.assertEqual([b'1234567890', b'12345', b'alpha'],
516
 
                         list(access.get_raw_records(memos)))
517
 
        self.assertEqual([b'1234567890'],
518
 
                         list(access.get_raw_records(memos[0:1])))
519
 
        self.assertEqual([b'12345'],
520
 
                         list(access.get_raw_records(memos[1:2])))
521
 
        self.assertEqual([b'alpha'],
522
 
                         list(access.get_raw_records(memos[2:3])))
523
 
        self.assertEqual([b'1234567890', b'alpha'],
524
 
                         list(access.get_raw_records(memos[0:1] + memos[2:3])))
 
440
        access = _DirectPackAccess({"FOO":(transport, 'packfile'),
 
441
            "FOOBAR":(transport, 'pack2'),
 
442
            "BAZ":(transport, 'pack3')})
 
443
        self.assertEqual(['1234567890', '12345', 'alpha'],
 
444
            list(access.get_raw_records(memos)))
 
445
        self.assertEqual(['1234567890'],
 
446
            list(access.get_raw_records(memos[0:1])))
 
447
        self.assertEqual(['12345'],
 
448
            list(access.get_raw_records(memos[1:2])))
 
449
        self.assertEqual(['alpha'],
 
450
            list(access.get_raw_records(memos[2:3])))
 
451
        self.assertEqual(['1234567890', 'alpha'],
 
452
            list(access.get_raw_records(memos[0:1] + memos[2:3])))
525
453
 
526
454
    def test_set_writer(self):
527
455
        """The writer should be settable post construction."""
528
 
        access = pack_repo._DirectPackAccess({})
 
456
        access = _DirectPackAccess({})
529
457
        transport = self.get_transport()
530
458
        packname = 'packfile'
531
459
        index = 'foo'
532
 
 
533
460
        def write_data(bytes):
534
461
            transport.append_bytes(packname, bytes)
535
462
        writer = pack.ContainerWriter(write_data)
536
463
        writer.begin()
537
464
        access.set_writer(writer, index, (transport, packname))
538
 
        memos = access.add_raw_records([(b'key', 10)], [b'1234567890'])
 
465
        memos = access.add_raw_records([('key', 10)], '1234567890')
539
466
        writer.end()
540
 
        self.assertEqual([b'1234567890'], list(access.get_raw_records(memos)))
 
467
        self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
541
468
 
542
469
    def test_missing_index_raises_retry(self):
543
470
        memos = self.make_pack_file()
544
471
        transport = self.get_transport()
545
472
        reload_called, reload_func = self.make_reload_func()
546
473
        # Note that the index key has changed from 'foo' to 'bar'
547
 
        access = pack_repo._DirectPackAccess({'bar': (transport, 'packname')},
548
 
                                             reload_func=reload_func)
 
474
        access = _DirectPackAccess({'bar':(transport, 'packname')},
 
475
                                   reload_func=reload_func)
549
476
        e = self.assertListRaises(errors.RetryWithNewPacks,
550
477
                                  access.get_raw_records, memos)
551
478
        # Because a key was passed in which does not match our index list, we
559
486
        memos = self.make_pack_file()
560
487
        transport = self.get_transport()
561
488
        # Note that the index key has changed from 'foo' to 'bar'
562
 
        access = pack_repo._DirectPackAccess({'bar': (transport, 'packname')})
 
489
        access = _DirectPackAccess({'bar':(transport, 'packname')})
563
490
        e = self.assertListRaises(KeyError, access.get_raw_records, memos)
564
491
 
565
492
    def test_missing_file_raises_retry(self):
567
494
        transport = self.get_transport()
568
495
        reload_called, reload_func = self.make_reload_func()
569
496
        # Note that the 'filename' has been changed to 'different-packname'
570
 
        access = pack_repo._DirectPackAccess(
571
 
            {'foo': (transport, 'different-packname')},
572
 
            reload_func=reload_func)
 
497
        access = _DirectPackAccess({'foo':(transport, 'different-packname')},
 
498
                                   reload_func=reload_func)
573
499
        e = self.assertListRaises(errors.RetryWithNewPacks,
574
500
                                  access.get_raw_records, memos)
575
501
        # The file has gone missing, so we assume we need to reload
583
509
        memos = self.make_pack_file()
584
510
        transport = self.get_transport()
585
511
        # Note that the 'filename' has been changed to 'different-packname'
586
 
        access = pack_repo._DirectPackAccess(
587
 
            {'foo': (transport, 'different-packname')})
 
512
        access = _DirectPackAccess({'foo':(transport, 'different-packname')})
588
513
        e = self.assertListRaises(errors.NoSuchFile,
589
514
                                  access.get_raw_records, memos)
590
515
 
592
517
        memos = self.make_pack_file()
593
518
        transport = self.get_transport()
594
519
        failing_transport = MockReadvFailingTransport(
595
 
            [transport.get_bytes('packname')])
 
520
                                [transport.get_bytes('packname')])
596
521
        reload_called, reload_func = self.make_reload_func()
597
 
        access = pack_repo._DirectPackAccess(
598
 
            {'foo': (failing_transport, 'packname')},
599
 
            reload_func=reload_func)
 
522
        access = _DirectPackAccess({'foo':(failing_transport, 'packname')},
 
523
                                   reload_func=reload_func)
600
524
        # Asking for a single record will not trigger the Mock failure
601
 
        self.assertEqual([b'1234567890'],
602
 
                         list(access.get_raw_records(memos[:1])))
603
 
        self.assertEqual([b'12345'],
604
 
                         list(access.get_raw_records(memos[1:2])))
 
525
        self.assertEqual(['1234567890'],
 
526
            list(access.get_raw_records(memos[:1])))
 
527
        self.assertEqual(['12345'],
 
528
            list(access.get_raw_records(memos[1:2])))
605
529
        # A multiple offset readv() will fail mid-way through
606
530
        e = self.assertListRaises(errors.RetryWithNewPacks,
607
531
                                  access.get_raw_records, memos)
616
540
        memos = self.make_pack_file()
617
541
        transport = self.get_transport()
618
542
        failing_transport = MockReadvFailingTransport(
619
 
            [transport.get_bytes('packname')])
 
543
                                [transport.get_bytes('packname')])
620
544
        reload_called, reload_func = self.make_reload_func()
621
 
        access = pack_repo._DirectPackAccess(
622
 
            {'foo': (failing_transport, 'packname')})
 
545
        access = _DirectPackAccess({'foo':(failing_transport, 'packname')})
623
546
        # Asking for a single record will not trigger the Mock failure
624
 
        self.assertEqual([b'1234567890'],
625
 
                         list(access.get_raw_records(memos[:1])))
626
 
        self.assertEqual([b'12345'],
627
 
                         list(access.get_raw_records(memos[1:2])))
 
547
        self.assertEqual(['1234567890'],
 
548
            list(access.get_raw_records(memos[:1])))
 
549
        self.assertEqual(['12345'],
 
550
            list(access.get_raw_records(memos[1:2])))
628
551
        # A multiple offset readv() will fail mid-way through
629
552
        e = self.assertListRaises(errors.NoSuchFile,
630
553
                                  access.get_raw_records, memos)
631
554
 
632
555
    def test_reload_or_raise_no_reload(self):
633
 
        access = pack_repo._DirectPackAccess({}, reload_func=None)
 
556
        access = _DirectPackAccess({}, reload_func=None)
634
557
        retry_exc = self.make_retry_exception()
635
558
        # Without a reload_func, we will just re-raise the original exception
636
559
        self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
637
560
 
638
561
    def test_reload_or_raise_reload_changed(self):
639
562
        reload_called, reload_func = self.make_reload_func(return_val=True)
640
 
        access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
 
563
        access = _DirectPackAccess({}, reload_func=reload_func)
641
564
        retry_exc = self.make_retry_exception()
642
565
        access.reload_or_raise(retry_exc)
643
566
        self.assertEqual([1], reload_called)
644
 
        retry_exc.reload_occurred = True
 
567
        retry_exc.reload_occurred=True
645
568
        access.reload_or_raise(retry_exc)
646
569
        self.assertEqual([2], reload_called)
647
570
 
648
571
    def test_reload_or_raise_reload_no_change(self):
649
572
        reload_called, reload_func = self.make_reload_func(return_val=False)
650
 
        access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
 
573
        access = _DirectPackAccess({}, reload_func=reload_func)
651
574
        retry_exc = self.make_retry_exception()
652
575
        # If reload_occurred is False, then we consider it an error to have
653
576
        # reload_func() return False (no changes).
654
577
        self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
655
578
        self.assertEqual([1], reload_called)
656
 
        retry_exc.reload_occurred = True
 
579
        retry_exc.reload_occurred=True
657
580
        # If reload_occurred is True, then we assume nothing changed because
658
581
        # it had changed earlier, but didn't change again
659
582
        access.reload_or_raise(retry_exc)
663
586
        vf, reload_counter = self.make_vf_for_retrying()
664
587
        # It is a little bit bogus to annotate the Revision VF, but it works,
665
588
        # as we have ancestry stored there
666
 
        key = (b'rev-3',)
 
589
        key = ('rev-3',)
667
590
        reload_lines = vf.annotate(key)
668
591
        self.assertEqual([1, 1, 0], reload_counter)
669
592
        plain_lines = vf.annotate(key)
670
 
        self.assertEqual([1, 1, 0], reload_counter)  # No extra reloading
 
593
        self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
671
594
        if reload_lines != plain_lines:
672
595
            self.fail('Annotation was not identical with reloading.')
673
596
        # Now delete the packs-in-use, which should trigger another reload, but
674
597
        # this time we just raise an exception because we can't recover
675
 
        for trans, name in vf._access._indices.values():
 
598
        for trans, name in vf._access._indices.itervalues():
676
599
            trans.delete(name)
677
600
        self.assertRaises(errors.NoSuchFile, vf.annotate, key)
678
601
        self.assertEqual([2, 1, 1], reload_counter)
679
602
 
680
603
    def test__get_record_map_retries(self):
681
604
        vf, reload_counter = self.make_vf_for_retrying()
682
 
        keys = [(b'rev-1',), (b'rev-2',), (b'rev-3',)]
 
605
        keys = [('rev-1',), ('rev-2',), ('rev-3',)]
683
606
        records = vf._get_record_map(keys)
684
607
        self.assertEqual(keys, sorted(records.keys()))
685
608
        self.assertEqual([1, 1, 0], reload_counter)
686
609
        # Now delete the packs-in-use, which should trigger another reload, but
687
610
        # this time we just raise an exception because we can't recover
688
 
        for trans, name in vf._access._indices.values():
 
611
        for trans, name in vf._access._indices.itervalues():
689
612
            trans.delete(name)
690
613
        self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
691
614
        self.assertEqual([2, 1, 1], reload_counter)
692
615
 
693
616
    def test_get_record_stream_retries(self):
694
617
        vf, reload_counter = self.make_vf_for_retrying()
695
 
        keys = [(b'rev-1',), (b'rev-2',), (b'rev-3',)]
 
618
        keys = [('rev-1',), ('rev-2',), ('rev-3',)]
696
619
        record_stream = vf.get_record_stream(keys, 'topological', False)
697
 
        record = next(record_stream)
698
 
        self.assertEqual((b'rev-1',), record.key)
 
620
        record = record_stream.next()
 
621
        self.assertEqual(('rev-1',), record.key)
699
622
        self.assertEqual([0, 0, 0], reload_counter)
700
 
        record = next(record_stream)
701
 
        self.assertEqual((b'rev-2',), record.key)
 
623
        record = record_stream.next()
 
624
        self.assertEqual(('rev-2',), record.key)
702
625
        self.assertEqual([1, 1, 0], reload_counter)
703
 
        record = next(record_stream)
704
 
        self.assertEqual((b'rev-3',), record.key)
 
626
        record = record_stream.next()
 
627
        self.assertEqual(('rev-3',), record.key)
705
628
        self.assertEqual([1, 1, 0], reload_counter)
706
629
        # Now delete all pack files, and see that we raise the right error
707
 
        for trans, name in vf._access._indices.values():
 
630
        for trans, name in vf._access._indices.itervalues():
708
631
            trans.delete(name)
709
632
        self.assertListRaises(errors.NoSuchFile,
710
 
                              vf.get_record_stream, keys, 'topological', False)
 
633
            vf.get_record_stream, keys, 'topological', False)
711
634
 
712
635
    def test_iter_lines_added_or_present_in_keys_retries(self):
713
636
        vf, reload_counter = self.make_vf_for_retrying()
714
 
        keys = [(b'rev-1',), (b'rev-2',), (b'rev-3',)]
 
637
        keys = [('rev-1',), ('rev-2',), ('rev-3',)]
715
638
        # Unfortunately, iter_lines_added_or_present_in_keys iterates the
716
639
        # result in random order (determined by the iteration order from a
717
640
        # set()), so we don't have any solid way to trigger whether data is
724
647
        self.assertEqual([1, 1, 0], reload_counter)
725
648
        # Now do it again, to make sure the result is equivalent
726
649
        plain_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
727
 
        self.assertEqual([1, 1, 0], reload_counter)  # No extra reloading
 
650
        self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
728
651
        self.assertEqual(plain_lines, reload_lines)
729
652
        self.assertEqual(21, len(plain_lines))
730
653
        # Now delete all pack files, and see that we raise the right error
731
 
        for trans, name in vf._access._indices.values():
 
654
        for trans, name in vf._access._indices.itervalues():
732
655
            trans.delete(name)
733
656
        self.assertListRaises(errors.NoSuchFile,
734
 
                              vf.iter_lines_added_or_present_in_keys, keys)
 
657
            vf.iter_lines_added_or_present_in_keys, keys)
735
658
        self.assertEqual([2, 1, 1], reload_counter)
736
659
 
737
660
    def test_get_record_stream_yields_disk_sorted_order(self):
742
665
        self.addCleanup(repo.unlock)
743
666
        repo.start_write_group()
744
667
        vf = repo.texts
745
 
        vf.add_lines((b'f-id', b'rev-5'), [(b'f-id', b'rev-4')], [b'lines\n'])
746
 
        vf.add_lines((b'f-id', b'rev-1'), [], [b'lines\n'])
747
 
        vf.add_lines((b'f-id', b'rev-2'), [(b'f-id', b'rev-1')], [b'lines\n'])
 
668
        vf.add_lines(('f-id', 'rev-5'), [('f-id', 'rev-4')], ['lines\n'])
 
669
        vf.add_lines(('f-id', 'rev-1'), [], ['lines\n'])
 
670
        vf.add_lines(('f-id', 'rev-2'), [('f-id', 'rev-1')], ['lines\n'])
748
671
        repo.commit_write_group()
749
672
        # We inserted them as rev-5, rev-1, rev-2, we should get them back in
750
673
        # the same order
751
 
        stream = vf.get_record_stream([(b'f-id', b'rev-1'), (b'f-id', b'rev-5'),
752
 
                                       (b'f-id', b'rev-2')], 'unordered', False)
 
674
        stream = vf.get_record_stream([('f-id', 'rev-1'), ('f-id', 'rev-5'),
 
675
                                       ('f-id', 'rev-2')], 'unordered', False)
753
676
        keys = [r.key for r in stream]
754
 
        self.assertEqual([(b'f-id', b'rev-5'), (b'f-id', b'rev-1'),
755
 
                          (b'f-id', b'rev-2')], keys)
 
677
        self.assertEqual([('f-id', 'rev-5'), ('f-id', 'rev-1'),
 
678
                          ('f-id', 'rev-2')], keys)
756
679
        repo.start_write_group()
757
 
        vf.add_lines((b'f-id', b'rev-4'), [(b'f-id', b'rev-3')], [b'lines\n'])
758
 
        vf.add_lines((b'f-id', b'rev-3'), [(b'f-id', b'rev-2')], [b'lines\n'])
759
 
        vf.add_lines((b'f-id', b'rev-6'), [(b'f-id', b'rev-5')], [b'lines\n'])
 
680
        vf.add_lines(('f-id', 'rev-4'), [('f-id', 'rev-3')], ['lines\n'])
 
681
        vf.add_lines(('f-id', 'rev-3'), [('f-id', 'rev-2')], ['lines\n'])
 
682
        vf.add_lines(('f-id', 'rev-6'), [('f-id', 'rev-5')], ['lines\n'])
760
683
        repo.commit_write_group()
761
684
        # Request in random order, to make sure the output order isn't based on
762
685
        # the request
763
 
        request_keys = set((b'f-id', b'rev-%d' % i) for i in range(1, 7))
 
686
        request_keys = set(('f-id', 'rev-%d' % i) for i in range(1, 7))
764
687
        stream = vf.get_record_stream(request_keys, 'unordered', False)
765
688
        keys = [r.key for r in stream]
766
689
        # We want to get the keys back in disk order, but it doesn't matter
767
690
        # which pack we read from first. So this can come back in 2 orders
768
 
        alt1 = [(b'f-id', b'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
769
 
        alt2 = [(b'f-id', b'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
 
691
        alt1 = [('f-id', 'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
 
692
        alt2 = [('f-id', 'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
770
693
        if keys != alt1 and keys != alt2:
771
694
            self.fail('Returned key order did not match either expected order.'
772
695
                      ' expected %s or %s, not %s'
776
699
class LowLevelKnitDataTests(TestCase):
777
700
 
778
701
    def create_gz_content(self, text):
779
 
        sio = BytesIO()
780
 
        with gzip.GzipFile(mode='wb', fileobj=sio) as gz_file:
781
 
            gz_file.write(text)
 
702
        sio = StringIO()
 
703
        gz_file = gzip.GzipFile(mode='wb', fileobj=sio)
 
704
        gz_file.write(text)
 
705
        gz_file.close()
782
706
        return sio.getvalue()
783
707
 
784
708
    def make_multiple_records(self):
785
709
        """Create the content for multiple records."""
786
 
        sha1sum = osutils.sha_string(b'foo\nbar\n')
 
710
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
787
711
        total_txt = []
788
 
        gz_txt = self.create_gz_content(b'version rev-id-1 2 %s\n'
789
 
                                        b'foo\n'
790
 
                                        b'bar\n'
791
 
                                        b'end rev-id-1\n'
 
712
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
 
713
                                        'foo\n'
 
714
                                        'bar\n'
 
715
                                        'end rev-id-1\n'
792
716
                                        % (sha1sum,))
793
717
        record_1 = (0, len(gz_txt), sha1sum)
794
718
        total_txt.append(gz_txt)
795
 
        sha1sum = osutils.sha_string(b'baz\n')
796
 
        gz_txt = self.create_gz_content(b'version rev-id-2 1 %s\n'
797
 
                                        b'baz\n'
798
 
                                        b'end rev-id-2\n'
 
719
        sha1sum = osutils.sha('baz\n').hexdigest()
 
720
        gz_txt = self.create_gz_content('version rev-id-2 1 %s\n'
 
721
                                        'baz\n'
 
722
                                        'end rev-id-2\n'
799
723
                                        % (sha1sum,))
800
724
        record_2 = (record_1[1], len(gz_txt), sha1sum)
801
725
        total_txt.append(gz_txt)
802
726
        return total_txt, record_1, record_2
803
727
 
804
728
    def test_valid_knit_data(self):
805
 
        sha1sum = osutils.sha_string(b'foo\nbar\n')
806
 
        gz_txt = self.create_gz_content(b'version rev-id-1 2 %s\n'
807
 
                                        b'foo\n'
808
 
                                        b'bar\n'
809
 
                                        b'end rev-id-1\n'
 
729
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
 
730
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
 
731
                                        'foo\n'
 
732
                                        'bar\n'
 
733
                                        'end rev-id-1\n'
810
734
                                        % (sha1sum,))
811
735
        transport = MockTransport([gz_txt])
812
736
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
813
737
        knit = KnitVersionedFiles(None, access)
814
 
        records = [((b'rev-id-1',), ((b'rev-id-1',), 0, len(gz_txt)))]
 
738
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
815
739
 
816
740
        contents = list(knit._read_records_iter(records))
817
 
        self.assertEqual([((b'rev-id-1',), [b'foo\n', b'bar\n'],
818
 
                           b'4e48e2c9a3d2ca8a708cb0cc545700544efb5021')], contents)
 
741
        self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'],
 
742
            '4e48e2c9a3d2ca8a708cb0cc545700544efb5021')], contents)
819
743
 
820
744
        raw_contents = list(knit._read_records_iter_raw(records))
821
 
        self.assertEqual([((b'rev-id-1',), gz_txt, sha1sum)], raw_contents)
 
745
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)
822
746
 
823
747
    def test_multiple_records_valid(self):
824
748
        total_txt, record_1, record_2 = self.make_multiple_records()
825
 
        transport = MockTransport([b''.join(total_txt)])
 
749
        transport = MockTransport([''.join(total_txt)])
826
750
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
827
751
        knit = KnitVersionedFiles(None, access)
828
 
        records = [((b'rev-id-1',), ((b'rev-id-1',), record_1[0], record_1[1])),
829
 
                   ((b'rev-id-2',), ((b'rev-id-2',), record_2[0], record_2[1]))]
 
752
        records = [(('rev-id-1',), (('rev-id-1',), record_1[0], record_1[1])),
 
753
                   (('rev-id-2',), (('rev-id-2',), record_2[0], record_2[1]))]
830
754
 
831
755
        contents = list(knit._read_records_iter(records))
832
 
        self.assertEqual([((b'rev-id-1',), [b'foo\n', b'bar\n'], record_1[2]),
833
 
                          ((b'rev-id-2',), [b'baz\n'], record_2[2])],
 
756
        self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'], record_1[2]),
 
757
                          (('rev-id-2',), ['baz\n'], record_2[2])],
834
758
                         contents)
835
759
 
836
760
        raw_contents = list(knit._read_records_iter_raw(records))
837
 
        self.assertEqual([((b'rev-id-1',), total_txt[0], record_1[2]),
838
 
                          ((b'rev-id-2',), total_txt[1], record_2[2])],
 
761
        self.assertEqual([(('rev-id-1',), total_txt[0], record_1[2]),
 
762
                          (('rev-id-2',), total_txt[1], record_2[2])],
839
763
                         raw_contents)
840
764
 
841
765
    def test_not_enough_lines(self):
842
 
        sha1sum = osutils.sha_string(b'foo\n')
 
766
        sha1sum = osutils.sha('foo\n').hexdigest()
843
767
        # record says 2 lines data says 1
844
 
        gz_txt = self.create_gz_content(b'version rev-id-1 2 %s\n'
845
 
                                        b'foo\n'
846
 
                                        b'end rev-id-1\n'
 
768
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
 
769
                                        'foo\n'
 
770
                                        'end rev-id-1\n'
847
771
                                        % (sha1sum,))
848
772
        transport = MockTransport([gz_txt])
849
773
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
850
774
        knit = KnitVersionedFiles(None, access)
851
 
        records = [((b'rev-id-1',), ((b'rev-id-1',), 0, len(gz_txt)))]
852
 
        self.assertRaises(KnitCorrupt, list,
853
 
                          knit._read_records_iter(records))
 
775
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
 
776
        self.assertRaises(errors.KnitCorrupt, list,
 
777
            knit._read_records_iter(records))
854
778
 
855
779
        # read_records_iter_raw won't detect that sort of mismatch/corruption
856
780
        raw_contents = list(knit._read_records_iter_raw(records))
857
 
        self.assertEqual([((b'rev-id-1',), gz_txt, sha1sum)], raw_contents)
 
781
        self.assertEqual([(('rev-id-1',),  gz_txt, sha1sum)], raw_contents)
858
782
 
859
783
    def test_too_many_lines(self):
860
 
        sha1sum = osutils.sha_string(b'foo\nbar\n')
 
784
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
861
785
        # record says 1 lines data says 2
862
 
        gz_txt = self.create_gz_content(b'version rev-id-1 1 %s\n'
863
 
                                        b'foo\n'
864
 
                                        b'bar\n'
865
 
                                        b'end rev-id-1\n'
 
786
        gz_txt = self.create_gz_content('version rev-id-1 1 %s\n'
 
787
                                        'foo\n'
 
788
                                        'bar\n'
 
789
                                        'end rev-id-1\n'
866
790
                                        % (sha1sum,))
867
791
        transport = MockTransport([gz_txt])
868
792
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
869
793
        knit = KnitVersionedFiles(None, access)
870
 
        records = [((b'rev-id-1',), ((b'rev-id-1',), 0, len(gz_txt)))]
871
 
        self.assertRaises(KnitCorrupt, list,
872
 
                          knit._read_records_iter(records))
 
794
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
 
795
        self.assertRaises(errors.KnitCorrupt, list,
 
796
            knit._read_records_iter(records))
873
797
 
874
798
        # read_records_iter_raw won't detect that sort of mismatch/corruption
875
799
        raw_contents = list(knit._read_records_iter_raw(records))
876
 
        self.assertEqual([((b'rev-id-1',), gz_txt, sha1sum)], raw_contents)
 
800
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)
877
801
 
878
802
    def test_mismatched_version_id(self):
879
 
        sha1sum = osutils.sha_string(b'foo\nbar\n')
880
 
        gz_txt = self.create_gz_content(b'version rev-id-1 2 %s\n'
881
 
                                        b'foo\n'
882
 
                                        b'bar\n'
883
 
                                        b'end rev-id-1\n'
 
803
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
 
804
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
 
805
                                        'foo\n'
 
806
                                        'bar\n'
 
807
                                        'end rev-id-1\n'
884
808
                                        % (sha1sum,))
885
809
        transport = MockTransport([gz_txt])
886
810
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
887
811
        knit = KnitVersionedFiles(None, access)
888
812
        # We are asking for rev-id-2, but the data is rev-id-1
889
 
        records = [((b'rev-id-2',), ((b'rev-id-2',), 0, len(gz_txt)))]
890
 
        self.assertRaises(KnitCorrupt, list,
891
 
                          knit._read_records_iter(records))
 
813
        records = [(('rev-id-2',), (('rev-id-2',), 0, len(gz_txt)))]
 
814
        self.assertRaises(errors.KnitCorrupt, list,
 
815
            knit._read_records_iter(records))
892
816
 
893
817
        # read_records_iter_raw detects mismatches in the header
894
 
        self.assertRaises(KnitCorrupt, list,
895
 
                          knit._read_records_iter_raw(records))
 
818
        self.assertRaises(errors.KnitCorrupt, list,
 
819
            knit._read_records_iter_raw(records))
896
820
 
897
821
    def test_uncompressed_data(self):
898
 
        sha1sum = osutils.sha_string(b'foo\nbar\n')
899
 
        txt = (b'version rev-id-1 2 %s\n'
900
 
               b'foo\n'
901
 
               b'bar\n'
902
 
               b'end rev-id-1\n'
 
822
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
 
823
        txt = ('version rev-id-1 2 %s\n'
 
824
               'foo\n'
 
825
               'bar\n'
 
826
               'end rev-id-1\n'
903
827
               % (sha1sum,))
904
828
        transport = MockTransport([txt])
905
829
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
906
830
        knit = KnitVersionedFiles(None, access)
907
 
        records = [((b'rev-id-1',), ((b'rev-id-1',), 0, len(txt)))]
 
831
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(txt)))]
908
832
 
909
833
        # We don't have valid gzip data ==> corrupt
910
 
        self.assertRaises(KnitCorrupt, list,
911
 
                          knit._read_records_iter(records))
 
834
        self.assertRaises(errors.KnitCorrupt, list,
 
835
            knit._read_records_iter(records))
912
836
 
913
837
        # read_records_iter_raw will notice the bad data
914
 
        self.assertRaises(KnitCorrupt, list,
915
 
                          knit._read_records_iter_raw(records))
 
838
        self.assertRaises(errors.KnitCorrupt, list,
 
839
            knit._read_records_iter_raw(records))
916
840
 
917
841
    def test_corrupted_data(self):
918
 
        sha1sum = osutils.sha_string(b'foo\nbar\n')
919
 
        gz_txt = self.create_gz_content(b'version rev-id-1 2 %s\n'
920
 
                                        b'foo\n'
921
 
                                        b'bar\n'
922
 
                                        b'end rev-id-1\n'
 
842
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
 
843
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
 
844
                                        'foo\n'
 
845
                                        'bar\n'
 
846
                                        'end rev-id-1\n'
923
847
                                        % (sha1sum,))
924
848
        # Change 2 bytes in the middle to \xff
925
 
        gz_txt = gz_txt[:10] + b'\xff\xff' + gz_txt[12:]
 
849
        gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
926
850
        transport = MockTransport([gz_txt])
927
851
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
928
852
        knit = KnitVersionedFiles(None, access)
929
 
        records = [((b'rev-id-1',), ((b'rev-id-1',), 0, len(gz_txt)))]
930
 
        self.assertRaises(KnitCorrupt, list,
931
 
                          knit._read_records_iter(records))
 
853
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
 
854
        self.assertRaises(errors.KnitCorrupt, list,
 
855
            knit._read_records_iter(records))
932
856
        # read_records_iter_raw will barf on bad gz data
933
 
        self.assertRaises(KnitCorrupt, list,
934
 
                          knit._read_records_iter_raw(records))
 
857
        self.assertRaises(errors.KnitCorrupt, list,
 
858
            knit._read_records_iter_raw(records))
935
859
 
936
860
 
937
861
class LowLevelKnitIndexTests(TestCase):
938
862
 
939
 
    @property
940
 
    def _load_data(self):
941
 
        from ..bzr._knit_load_data_py import _load_data_py
942
 
        return _load_data_py
943
 
 
944
863
    def get_knit_index(self, transport, name, mode):
945
864
        mapper = ConstantMapper(name)
946
 
        self.overrideAttr(knit, '_load_data', self._load_data)
947
 
 
948
 
        def allow_writes():
949
 
            return 'w' in mode
950
 
        return _KndxIndex(transport, mapper, lambda: None, allow_writes, lambda: True)
 
865
        from bzrlib._knit_load_data_py import _load_data_py
 
866
        self.overrideAttr(knit, '_load_data', _load_data_py)
 
867
        allow_writes = lambda: 'w' in mode
 
868
        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
951
869
 
952
870
    def test_create_file(self):
953
871
        transport = MockTransport()
954
872
        index = self.get_knit_index(transport, "filename", "w")
955
873
        index.keys()
956
874
        call = transport.calls.pop(0)
957
 
        # call[1][1] is a BytesIO - we can't test it by simple equality.
 
875
        # call[1][1] is a StringIO - we can't test it by simple equality.
958
876
        self.assertEqual('put_file_non_atomic', call[0])
959
877
        self.assertEqual('filename.kndx', call[1][0])
960
878
        # With no history, _KndxIndex writes a new index:
961
879
        self.assertEqual(_KndxIndex.HEADER,
962
 
                         call[1][1].getvalue())
 
880
            call[1][1].getvalue())
963
881
        self.assertEqual({'create_parent_dir': True}, call[2])
964
882
 
965
883
    def test_read_utf8_version_id(self):
967
885
        utf8_revision_id = unicode_revision_id.encode('utf-8')
968
886
        transport = MockTransport([
969
887
            _KndxIndex.HEADER,
970
 
            b'%s option 0 1 :' % (utf8_revision_id,)
 
888
            '%s option 0 1 :' % (utf8_revision_id,)
971
889
            ])
972
890
        index = self.get_knit_index(transport, "filename", "r")
973
891
        # _KndxIndex is a private class, and deals in utf8 revision_ids, not
974
892
        # Unicode revision_ids.
975
 
        self.assertEqual({(utf8_revision_id,): ()},
976
 
                         index.get_parent_map(index.keys()))
 
893
        self.assertEqual({(utf8_revision_id,):()},
 
894
            index.get_parent_map(index.keys()))
977
895
        self.assertFalse((unicode_revision_id,) in index.keys())
978
896
 
979
897
    def test_read_utf8_parents(self):
981
899
        utf8_revision_id = unicode_revision_id.encode('utf-8')
982
900
        transport = MockTransport([
983
901
            _KndxIndex.HEADER,
984
 
            b"version option 0 1 .%s :" % (utf8_revision_id,)
 
902
            "version option 0 1 .%s :" % (utf8_revision_id,)
985
903
            ])
986
904
        index = self.get_knit_index(transport, "filename", "r")
987
 
        self.assertEqual({(b"version",): ((utf8_revision_id,),)},
988
 
                         index.get_parent_map(index.keys()))
 
905
        self.assertEqual({("version",):((utf8_revision_id,),)},
 
906
            index.get_parent_map(index.keys()))
989
907
 
990
908
    def test_read_ignore_corrupted_lines(self):
991
909
        transport = MockTransport([
992
910
            _KndxIndex.HEADER,
993
 
            b"corrupted",
994
 
            b"corrupted options 0 1 .b .c ",
995
 
            b"version options 0 1 :"
 
911
            "corrupted",
 
912
            "corrupted options 0 1 .b .c ",
 
913
            "version options 0 1 :"
996
914
            ])
997
915
        index = self.get_knit_index(transport, "filename", "r")
998
916
        self.assertEqual(1, len(index.keys()))
999
 
        self.assertEqual({(b"version",)}, index.keys())
 
917
        self.assertEqual(set([("version",)]), index.keys())
1000
918
 
1001
919
    def test_read_corrupted_header(self):
1002
 
        transport = MockTransport([b'not a bzr knit index header\n'])
 
920
        transport = MockTransport(['not a bzr knit index header\n'])
1003
921
        index = self.get_knit_index(transport, "filename", "r")
1004
922
        self.assertRaises(KnitHeaderError, index.keys)
1005
923
 
1006
924
    def test_read_duplicate_entries(self):
1007
925
        transport = MockTransport([
1008
926
            _KndxIndex.HEADER,
1009
 
            b"parent options 0 1 :",
1010
 
            b"version options1 0 1 0 :",
1011
 
            b"version options2 1 2 .other :",
1012
 
            b"version options3 3 4 0 .other :"
 
927
            "parent options 0 1 :",
 
928
            "version options1 0 1 0 :",
 
929
            "version options2 1 2 .other :",
 
930
            "version options3 3 4 0 .other :"
1013
931
            ])
1014
932
        index = self.get_knit_index(transport, "filename", "r")
1015
933
        self.assertEqual(2, len(index.keys()))
1016
934
        # check that the index used is the first one written. (Specific
1017
935
        # to KnitIndex style indices.
1018
 
        self.assertEqual(b"1", index._dictionary_compress([(b"version",)]))
1019
 
        self.assertEqual(((b"version",), 3, 4),
1020
 
                         index.get_position((b"version",)))
1021
 
        self.assertEqual([b"options3"], index.get_options((b"version",)))
1022
 
        self.assertEqual({(b"version",): ((b"parent",), (b"other",))},
1023
 
                         index.get_parent_map([(b"version",)]))
 
936
        self.assertEqual("1", index._dictionary_compress([("version",)]))
 
937
        self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
 
938
        self.assertEqual(["options3"], index.get_options(("version",)))
 
939
        self.assertEqual({("version",):(("parent",), ("other",))},
 
940
            index.get_parent_map([("version",)]))
1024
941
 
1025
942
    def test_read_compressed_parents(self):
1026
943
        transport = MockTransport([
1027
944
            _KndxIndex.HEADER,
1028
 
            b"a option 0 1 :",
1029
 
            b"b option 0 1 0 :",
1030
 
            b"c option 0 1 1 0 :",
 
945
            "a option 0 1 :",
 
946
            "b option 0 1 0 :",
 
947
            "c option 0 1 1 0 :",
1031
948
            ])
1032
949
        index = self.get_knit_index(transport, "filename", "r")
1033
 
        self.assertEqual({(b"b",): ((b"a",),), (b"c",): ((b"b",), (b"a",))},
1034
 
                         index.get_parent_map([(b"b",), (b"c",)]))
 
950
        self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
 
951
            index.get_parent_map([("b",), ("c",)]))
1035
952
 
1036
953
    def test_write_utf8_version_id(self):
1037
954
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
1041
958
            ])
1042
959
        index = self.get_knit_index(transport, "filename", "r")
1043
960
        index.add_records([
1044
 
            ((utf8_revision_id,), [b"option"], ((utf8_revision_id,), 0, 1), [])])
 
961
            ((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
1045
962
        call = transport.calls.pop(0)
1046
 
        # call[1][1] is a BytesIO - we can't test it by simple equality.
 
963
        # call[1][1] is a StringIO - we can't test it by simple equality.
1047
964
        self.assertEqual('put_file_non_atomic', call[0])
1048
965
        self.assertEqual('filename.kndx', call[1][0])
1049
966
        # With no history, _KndxIndex writes a new index:
1050
967
        self.assertEqual(_KndxIndex.HEADER +
1051
 
                         b"\n%s option 0 1  :" % (utf8_revision_id,),
1052
 
                         call[1][1].getvalue())
 
968
            "\n%s option 0 1  :" % (utf8_revision_id,),
 
969
            call[1][1].getvalue())
1053
970
        self.assertEqual({'create_parent_dir': True}, call[2])
1054
971
 
1055
972
    def test_write_utf8_parents(self):
1060
977
            ])
1061
978
        index = self.get_knit_index(transport, "filename", "r")
1062
979
        index.add_records([
1063
 
            ((b"version",), [b"option"], ((b"version",), 0, 1), [(utf8_revision_id,)])])
 
980
            (("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
1064
981
        call = transport.calls.pop(0)
1065
 
        # call[1][1] is a BytesIO - we can't test it by simple equality.
 
982
        # call[1][1] is a StringIO - we can't test it by simple equality.
1066
983
        self.assertEqual('put_file_non_atomic', call[0])
1067
984
        self.assertEqual('filename.kndx', call[1][0])
1068
985
        # With no history, _KndxIndex writes a new index:
1069
986
        self.assertEqual(_KndxIndex.HEADER +
1070
 
                         b"\nversion option 0 1 .%s :" % (utf8_revision_id,),
1071
 
                         call[1][1].getvalue())
 
987
            "\nversion option 0 1 .%s :" % (utf8_revision_id,),
 
988
            call[1][1].getvalue())
1072
989
        self.assertEqual({'create_parent_dir': True}, call[2])
1073
990
 
1074
991
    def test_keys(self):
1079
996
 
1080
997
        self.assertEqual(set(), index.keys())
1081
998
 
1082
 
        index.add_records([((b"a",), [b"option"], ((b"a",), 0, 1), [])])
1083
 
        self.assertEqual({(b"a",)}, index.keys())
1084
 
 
1085
 
        index.add_records([((b"a",), [b"option"], ((b"a",), 0, 1), [])])
1086
 
        self.assertEqual({(b"a",)}, index.keys())
1087
 
 
1088
 
        index.add_records([((b"b",), [b"option"], ((b"b",), 0, 1), [])])
1089
 
        self.assertEqual({(b"a",), (b"b",)}, index.keys())
 
999
        index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
 
1000
        self.assertEqual(set([("a",)]), index.keys())
 
1001
 
 
1002
        index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
 
1003
        self.assertEqual(set([("a",)]), index.keys())
 
1004
 
 
1005
        index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
 
1006
        self.assertEqual(set([("a",), ("b",)]), index.keys())
1090
1007
 
1091
1008
    def add_a_b(self, index, random_id=None):
1092
1009
        kwargs = {}
1093
1010
        if random_id is not None:
1094
1011
            kwargs["random_id"] = random_id
1095
1012
        index.add_records([
1096
 
            ((b"a",), [b"option"], ((b"a",), 0, 1), [(b"b",)]),
1097
 
            ((b"a",), [b"opt"], ((b"a",), 1, 2), [(b"c",)]),
1098
 
            ((b"b",), [b"option"], ((b"b",), 2, 3), [(b"a",)])
 
1013
            (("a",), ["option"], (("a",), 0, 1), [("b",)]),
 
1014
            (("a",), ["opt"], (("a",), 1, 2), [("c",)]),
 
1015
            (("b",), ["option"], (("b",), 2, 3), [("a",)])
1099
1016
            ], **kwargs)
1100
1017
 
1101
1018
    def assertIndexIsAB(self, index):
1102
1019
        self.assertEqual({
1103
 
            (b'a',): ((b'c',),),
1104
 
            (b'b',): ((b'a',),),
 
1020
            ('a',): (('c',),),
 
1021
            ('b',): (('a',),),
1105
1022
            },
1106
1023
            index.get_parent_map(index.keys()))
1107
 
        self.assertEqual(((b"a",), 1, 2), index.get_position((b"a",)))
1108
 
        self.assertEqual(((b"b",), 2, 3), index.get_position((b"b",)))
1109
 
        self.assertEqual([b"opt"], index.get_options((b"a",)))
 
1024
        self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
 
1025
        self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
 
1026
        self.assertEqual(["opt"], index.get_options(("a",)))
1110
1027
 
1111
1028
    def test_add_versions(self):
1112
1029
        transport = MockTransport([
1116
1033
 
1117
1034
        self.add_a_b(index)
1118
1035
        call = transport.calls.pop(0)
1119
 
        # call[1][1] is a BytesIO - we can't test it by simple equality.
 
1036
        # call[1][1] is a StringIO - we can't test it by simple equality.
1120
1037
        self.assertEqual('put_file_non_atomic', call[0])
1121
1038
        self.assertEqual('filename.kndx', call[1][0])
1122
1039
        # With no history, _KndxIndex writes a new index:
1123
1040
        self.assertEqual(
1124
1041
            _KndxIndex.HEADER +
1125
 
            b"\na option 0 1 .b :"
1126
 
            b"\na opt 1 2 .c :"
1127
 
            b"\nb option 2 3 0 :",
 
1042
            "\na option 0 1 .b :"
 
1043
            "\na opt 1 2 .c :"
 
1044
            "\nb option 2 3 0 :",
1128
1045
            call[1][1].getvalue())
1129
1046
        self.assertEqual({'create_parent_dir': True}, call[2])
1130
1047
        self.assertIndexIsAB(index)
1143
1060
        # dir_mode=0777)
1144
1061
        self.assertEqual([], transport.calls)
1145
1062
        self.add_a_b(index)
1146
 
        # self.assertEqual(
1147
 
        # [    {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
 
1063
        #self.assertEqual(
 
1064
        #[    {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
1148
1065
        #    kwargs)
1149
1066
        # Two calls: one during which we load the existing index (and when its
1150
1067
        # missing create it), then a second where we write the contents out.
1156
1073
        self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
1157
1074
        self.assertEqual({'create_parent_dir': True}, call[2])
1158
1075
        call = transport.calls.pop(0)
1159
 
        # call[1][1] is a BytesIO - we can't test it by simple equality.
 
1076
        # call[1][1] is a StringIO - we can't test it by simple equality.
1160
1077
        self.assertEqual('put_file_non_atomic', call[0])
1161
1078
        self.assertEqual('filename.kndx', call[1][0])
1162
1079
        # With no history, _KndxIndex writes a new index:
1163
1080
        self.assertEqual(
1164
1081
            _KndxIndex.HEADER +
1165
 
            b"\na option 0 1 .b :"
1166
 
            b"\na opt 1 2 .c :"
1167
 
            b"\nb option 2 3 0 :",
 
1082
            "\na option 0 1 .b :"
 
1083
            "\na opt 1 2 .c :"
 
1084
            "\nb option 2 3 0 :",
1168
1085
            call[1][1].getvalue())
1169
1086
        self.assertEqual({'create_parent_dir': True}, call[2])
1170
1087
 
1174
1091
 
1175
1092
    def test__get_total_build_size(self):
1176
1093
        positions = {
1177
 
            (b'a',): (('fulltext', False), ((b'a',), 0, 100), None),
1178
 
            (b'b',): (('line-delta', False), ((b'b',), 100, 21), (b'a',)),
1179
 
            (b'c',): (('line-delta', False), ((b'c',), 121, 35), (b'b',)),
1180
 
            (b'd',): (('line-delta', False), ((b'd',), 156, 12), (b'b',)),
 
1094
            ('a',): (('fulltext', False), (('a',), 0, 100), None),
 
1095
            ('b',): (('line-delta', False), (('b',), 100, 21), ('a',)),
 
1096
            ('c',): (('line-delta', False), (('c',), 121, 35), ('b',)),
 
1097
            ('d',): (('line-delta', False), (('d',), 156, 12), ('b',)),
1181
1098
            }
1182
 
        self.assertTotalBuildSize(100, [(b'a',)], positions)
1183
 
        self.assertTotalBuildSize(121, [(b'b',)], positions)
 
1099
        self.assertTotalBuildSize(100, [('a',)], positions)
 
1100
        self.assertTotalBuildSize(121, [('b',)], positions)
1184
1101
        # c needs both a & b
1185
 
        self.assertTotalBuildSize(156, [(b'c',)], positions)
 
1102
        self.assertTotalBuildSize(156, [('c',)], positions)
1186
1103
        # we shouldn't count 'b' twice
1187
 
        self.assertTotalBuildSize(156, [(b'b',), (b'c',)], positions)
1188
 
        self.assertTotalBuildSize(133, [(b'd',)], positions)
1189
 
        self.assertTotalBuildSize(168, [(b'c',), (b'd',)], positions)
 
1104
        self.assertTotalBuildSize(156, [('b',), ('c',)], positions)
 
1105
        self.assertTotalBuildSize(133, [('d',)], positions)
 
1106
        self.assertTotalBuildSize(168, [('c',), ('d',)], positions)
1190
1107
 
1191
1108
    def test_get_position(self):
1192
1109
        transport = MockTransport([
1193
1110
            _KndxIndex.HEADER,
1194
 
            b"a option 0 1 :",
1195
 
            b"b option 1 2 :"
 
1111
            "a option 0 1 :",
 
1112
            "b option 1 2 :"
1196
1113
            ])
1197
1114
        index = self.get_knit_index(transport, "filename", "r")
1198
1115
 
1199
 
        self.assertEqual(((b"a",), 0, 1), index.get_position((b"a",)))
1200
 
        self.assertEqual(((b"b",), 1, 2), index.get_position((b"b",)))
 
1116
        self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
 
1117
        self.assertEqual((("b",), 1, 2), index.get_position(("b",)))
1201
1118
 
1202
1119
    def test_get_method(self):
1203
1120
        transport = MockTransport([
1204
1121
            _KndxIndex.HEADER,
1205
 
            b"a fulltext,unknown 0 1 :",
1206
 
            b"b unknown,line-delta 1 2 :",
1207
 
            b"c bad 3 4 :"
 
1122
            "a fulltext,unknown 0 1 :",
 
1123
            "b unknown,line-delta 1 2 :",
 
1124
            "c bad 3 4 :"
1208
1125
            ])
1209
1126
        index = self.get_knit_index(transport, "filename", "r")
1210
1127
 
1211
 
        self.assertEqual("fulltext", index.get_method(b"a"))
1212
 
        self.assertEqual("line-delta", index.get_method(b"b"))
1213
 
        self.assertRaises(knit.KnitIndexUnknownMethod, index.get_method, b"c")
 
1128
        self.assertEqual("fulltext", index.get_method("a"))
 
1129
        self.assertEqual("line-delta", index.get_method("b"))
 
1130
        self.assertRaises(errors.KnitIndexUnknownMethod, index.get_method, "c")
1214
1131
 
1215
1132
    def test_get_options(self):
1216
1133
        transport = MockTransport([
1217
1134
            _KndxIndex.HEADER,
1218
 
            b"a opt1 0 1 :",
1219
 
            b"b opt2,opt3 1 2 :"
 
1135
            "a opt1 0 1 :",
 
1136
            "b opt2,opt3 1 2 :"
1220
1137
            ])
1221
1138
        index = self.get_knit_index(transport, "filename", "r")
1222
1139
 
1223
 
        self.assertEqual([b"opt1"], index.get_options(b"a"))
1224
 
        self.assertEqual([b"opt2", b"opt3"], index.get_options(b"b"))
 
1140
        self.assertEqual(["opt1"], index.get_options("a"))
 
1141
        self.assertEqual(["opt2", "opt3"], index.get_options("b"))
1225
1142
 
1226
1143
    def test_get_parent_map(self):
1227
1144
        transport = MockTransport([
1228
1145
            _KndxIndex.HEADER,
1229
 
            b"a option 0 1 :",
1230
 
            b"b option 1 2 0 .c :",
1231
 
            b"c option 1 2 1 0 .e :"
 
1146
            "a option 0 1 :",
 
1147
            "b option 1 2 0 .c :",
 
1148
            "c option 1 2 1 0 .e :"
1232
1149
            ])
1233
1150
        index = self.get_knit_index(transport, "filename", "r")
1234
1151
 
1235
1152
        self.assertEqual({
1236
 
            (b"a",): (),
1237
 
            (b"b",): ((b"a",), (b"c",)),
1238
 
            (b"c",): ((b"b",), (b"a",), (b"e",)),
 
1153
            ("a",):(),
 
1154
            ("b",):(("a",), ("c",)),
 
1155
            ("c",):(("b",), ("a",), ("e",)),
1239
1156
            }, index.get_parent_map(index.keys()))
1240
1157
 
1241
1158
    def test_impossible_parent(self):
1242
1159
        """Test we get KnitCorrupt if the parent couldn't possibly exist."""
1243
1160
        transport = MockTransport([
1244
1161
            _KndxIndex.HEADER,
1245
 
            b"a option 0 1 :",
1246
 
            b"b option 0 1 4 :"  # We don't have a 4th record
 
1162
            "a option 0 1 :",
 
1163
            "b option 0 1 4 :"  # We don't have a 4th record
1247
1164
            ])
1248
1165
        index = self.get_knit_index(transport, 'filename', 'r')
1249
 
        self.assertRaises(KnitCorrupt, index.keys)
 
1166
        try:
 
1167
            self.assertRaises(errors.KnitCorrupt, index.keys)
 
1168
        except TypeError, e:
 
1169
            if (str(e) == ('exceptions must be strings, classes, or instances,'
 
1170
                           ' not exceptions.IndexError')
 
1171
                and sys.version_info[0:2] >= (2,5)):
 
1172
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
 
1173
                                  ' raising new style exceptions with python'
 
1174
                                  ' >=2.5')
 
1175
            else:
 
1176
                raise
1250
1177
 
1251
1178
    def test_corrupted_parent(self):
1252
1179
        transport = MockTransport([
1253
1180
            _KndxIndex.HEADER,
1254
 
            b"a option 0 1 :",
1255
 
            b"b option 0 1 :",
1256
 
            b"c option 0 1 1v :",  # Can't have a parent of '1v'
 
1181
            "a option 0 1 :",
 
1182
            "b option 0 1 :",
 
1183
            "c option 0 1 1v :", # Can't have a parent of '1v'
1257
1184
            ])
1258
1185
        index = self.get_knit_index(transport, 'filename', 'r')
1259
 
        self.assertRaises(KnitCorrupt, index.keys)
 
1186
        try:
 
1187
            self.assertRaises(errors.KnitCorrupt, index.keys)
 
1188
        except TypeError, e:
 
1189
            if (str(e) == ('exceptions must be strings, classes, or instances,'
 
1190
                           ' not exceptions.ValueError')
 
1191
                and sys.version_info[0:2] >= (2,5)):
 
1192
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
 
1193
                                  ' raising new style exceptions with python'
 
1194
                                  ' >=2.5')
 
1195
            else:
 
1196
                raise
1260
1197
 
1261
1198
    def test_corrupted_parent_in_list(self):
1262
1199
        transport = MockTransport([
1263
1200
            _KndxIndex.HEADER,
1264
 
            b"a option 0 1 :",
1265
 
            b"b option 0 1 :",
1266
 
            b"c option 0 1 1 v :",  # Can't have a parent of 'v'
 
1201
            "a option 0 1 :",
 
1202
            "b option 0 1 :",
 
1203
            "c option 0 1 1 v :", # Can't have a parent of 'v'
1267
1204
            ])
1268
1205
        index = self.get_knit_index(transport, 'filename', 'r')
1269
 
        self.assertRaises(KnitCorrupt, index.keys)
 
1206
        try:
 
1207
            self.assertRaises(errors.KnitCorrupt, index.keys)
 
1208
        except TypeError, e:
 
1209
            if (str(e) == ('exceptions must be strings, classes, or instances,'
 
1210
                           ' not exceptions.ValueError')
 
1211
                and sys.version_info[0:2] >= (2,5)):
 
1212
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
 
1213
                                  ' raising new style exceptions with python'
 
1214
                                  ' >=2.5')
 
1215
            else:
 
1216
                raise
1270
1217
 
1271
1218
    def test_invalid_position(self):
1272
1219
        transport = MockTransport([
1273
1220
            _KndxIndex.HEADER,
1274
 
            b"a option 1v 1 :",
 
1221
            "a option 1v 1 :",
1275
1222
            ])
1276
1223
        index = self.get_knit_index(transport, 'filename', 'r')
1277
 
        self.assertRaises(KnitCorrupt, index.keys)
 
1224
        try:
 
1225
            self.assertRaises(errors.KnitCorrupt, index.keys)
 
1226
        except TypeError, e:
 
1227
            if (str(e) == ('exceptions must be strings, classes, or instances,'
 
1228
                           ' not exceptions.ValueError')
 
1229
                and sys.version_info[0:2] >= (2,5)):
 
1230
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
 
1231
                                  ' raising new style exceptions with python'
 
1232
                                  ' >=2.5')
 
1233
            else:
 
1234
                raise
1278
1235
 
1279
1236
    def test_invalid_size(self):
1280
1237
        transport = MockTransport([
1281
1238
            _KndxIndex.HEADER,
1282
 
            b"a option 1 1v :",
 
1239
            "a option 1 1v :",
1283
1240
            ])
1284
1241
        index = self.get_knit_index(transport, 'filename', 'r')
1285
 
        self.assertRaises(KnitCorrupt, index.keys)
 
1242
        try:
 
1243
            self.assertRaises(errors.KnitCorrupt, index.keys)
 
1244
        except TypeError, e:
 
1245
            if (str(e) == ('exceptions must be strings, classes, or instances,'
 
1246
                           ' not exceptions.ValueError')
 
1247
                and sys.version_info[0:2] >= (2,5)):
 
1248
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
 
1249
                                  ' raising new style exceptions with python'
 
1250
                                  ' >=2.5')
 
1251
            else:
 
1252
                raise
1286
1253
 
1287
1254
    def test_scan_unvalidated_index_not_implemented(self):
1288
1255
        transport = MockTransport()
1296
1263
    def test_short_line(self):
1297
1264
        transport = MockTransport([
1298
1265
            _KndxIndex.HEADER,
1299
 
            b"a option 0 10  :",
1300
 
            b"b option 10 10 0",  # This line isn't terminated, ignored
 
1266
            "a option 0 10  :",
 
1267
            "b option 10 10 0", # This line isn't terminated, ignored
1301
1268
            ])
1302
1269
        index = self.get_knit_index(transport, "filename", "r")
1303
 
        self.assertEqual({(b'a',)}, index.keys())
 
1270
        self.assertEqual(set([('a',)]), index.keys())
1304
1271
 
1305
1272
    def test_skip_incomplete_record(self):
1306
1273
        # A line with bogus data should just be skipped
1307
1274
        transport = MockTransport([
1308
1275
            _KndxIndex.HEADER,
1309
 
            b"a option 0 10  :",
1310
 
            b"b option 10 10 0",  # This line isn't terminated, ignored
1311
 
            b"c option 20 10 0 :",  # Properly terminated, and starts with '\n'
 
1276
            "a option 0 10  :",
 
1277
            "b option 10 10 0", # This line isn't terminated, ignored
 
1278
            "c option 20 10 0 :", # Properly terminated, and starts with '\n'
1312
1279
            ])
1313
1280
        index = self.get_knit_index(transport, "filename", "r")
1314
 
        self.assertEqual({(b'a',), (b'c',)}, index.keys())
 
1281
        self.assertEqual(set([('a',), ('c',)]), index.keys())
1315
1282
 
1316
1283
    def test_trailing_characters(self):
1317
1284
        # A line with bogus data should just be skipped
1318
1285
        transport = MockTransport([
1319
1286
            _KndxIndex.HEADER,
1320
 
            b"a option 0 10  :",
1321
 
            b"b option 10 10 0 :a",  # This line has extra trailing characters
1322
 
            b"c option 20 10 0 :",  # Properly terminated, and starts with '\n'
 
1287
            "a option 0 10  :",
 
1288
            "b option 10 10 0 :a", # This line has extra trailing characters
 
1289
            "c option 20 10 0 :", # Properly terminated, and starts with '\n'
1323
1290
            ])
1324
1291
        index = self.get_knit_index(transport, "filename", "r")
1325
 
        self.assertEqual({(b'a',), (b'c',)}, index.keys())
 
1292
        self.assertEqual(set([('a',), ('c',)]), index.keys())
1326
1293
 
1327
1294
 
1328
1295
class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
1329
1296
 
1330
1297
    _test_needs_features = [compiled_knit_feature]
1331
1298
 
1332
 
    @property
1333
 
    def _load_data(self):
1334
 
        from ..bzr._knit_load_data_pyx import _load_data_c
1335
 
        return _load_data_c
 
1299
    def get_knit_index(self, transport, name, mode):
 
1300
        mapper = ConstantMapper(name)
 
1301
        from bzrlib._knit_load_data_pyx import _load_data_c
 
1302
        self.overrideAttr(knit, '_load_data', _load_data_c)
 
1303
        allow_writes = lambda: mode == 'w'
 
1304
        return _KndxIndex(transport, mapper, lambda:None,
 
1305
                          allow_writes, lambda:True)
1336
1306
 
1337
1307
 
1338
1308
class Test_KnitAnnotator(TestCaseWithMemoryTransport):
1344
1314
 
1345
1315
    def test__expand_fulltext(self):
1346
1316
        ann = self.make_annotator()
1347
 
        rev_key = (b'rev-id',)
 
1317
        rev_key = ('rev-id',)
1348
1318
        ann._num_compression_children[rev_key] = 1
1349
 
        res = ann._expand_record(rev_key, ((b'parent-id',),), None,
1350
 
                                 [b'line1\n', b'line2\n'], ('fulltext', True))
 
1319
        res = ann._expand_record(rev_key, (('parent-id',),), None,
 
1320
                           ['line1\n', 'line2\n'], ('fulltext', True))
1351
1321
        # The content object and text lines should be cached appropriately
1352
 
        self.assertEqual([b'line1\n', b'line2'], res)
 
1322
        self.assertEqual(['line1\n', 'line2'], res)
1353
1323
        content_obj = ann._content_objects[rev_key]
1354
 
        self.assertEqual([b'line1\n', b'line2\n'], content_obj._lines)
 
1324
        self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
1355
1325
        self.assertEqual(res, content_obj.text())
1356
1326
        self.assertEqual(res, ann._text_cache[rev_key])
1357
1327
 
1359
1329
        # Parent isn't available yet, so we return nothing, but queue up this
1360
1330
        # node for later processing
1361
1331
        ann = self.make_annotator()
1362
 
        rev_key = (b'rev-id',)
1363
 
        parent_key = (b'parent-id',)
1364
 
        record = [b'0,1,1\n', b'new-line\n']
 
1332
        rev_key = ('rev-id',)
 
1333
        parent_key = ('parent-id',)
 
1334
        record = ['0,1,1\n', 'new-line\n']
1365
1335
        details = ('line-delta', False)
1366
1336
        res = ann._expand_record(rev_key, (parent_key,), parent_key,
1367
1337
                                 record, details)
1373
1343
 
1374
1344
    def test__expand_record_tracks_num_children(self):
1375
1345
        ann = self.make_annotator()
1376
 
        rev_key = (b'rev-id',)
1377
 
        rev2_key = (b'rev2-id',)
1378
 
        parent_key = (b'parent-id',)
1379
 
        record = [b'0,1,1\n', b'new-line\n']
 
1346
        rev_key = ('rev-id',)
 
1347
        rev2_key = ('rev2-id',)
 
1348
        parent_key = ('parent-id',)
 
1349
        record = ['0,1,1\n', 'new-line\n']
1380
1350
        details = ('line-delta', False)
1381
1351
        ann._num_compression_children[parent_key] = 2
1382
 
        ann._expand_record(parent_key, (), None, [b'line1\n', b'line2\n'],
 
1352
        ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
1383
1353
                           ('fulltext', False))
1384
1354
        res = ann._expand_record(rev_key, (parent_key,), parent_key,
1385
1355
                                 record, details)
1396
1366
 
1397
1367
    def test__expand_delta_records_blocks(self):
1398
1368
        ann = self.make_annotator()
1399
 
        rev_key = (b'rev-id',)
1400
 
        parent_key = (b'parent-id',)
1401
 
        record = [b'0,1,1\n', b'new-line\n']
 
1369
        rev_key = ('rev-id',)
 
1370
        parent_key = ('parent-id',)
 
1371
        record = ['0,1,1\n', 'new-line\n']
1402
1372
        details = ('line-delta', True)
1403
1373
        ann._num_compression_children[parent_key] = 2
1404
1374
        ann._expand_record(parent_key, (), None,
1405
 
                           [b'line1\n', b'line2\n', b'line3\n'],
 
1375
                           ['line1\n', 'line2\n', 'line3\n'],
1406
1376
                           ('fulltext', False))
1407
1377
        ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
1408
1378
        self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
1409
1379
                         ann._matching_blocks)
1410
 
        rev2_key = (b'rev2-id',)
1411
 
        record = [b'0,1,1\n', b'new-line\n']
 
1380
        rev2_key = ('rev2-id',)
 
1381
        record = ['0,1,1\n', 'new-line\n']
1412
1382
        details = ('line-delta', False)
1413
 
        ann._expand_record(rev2_key, (parent_key,),
1414
 
                           parent_key, record, details)
 
1383
        ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
1415
1384
        self.assertEqual([(1, 1, 2), (3, 3, 0)],
1416
1385
                         ann._matching_blocks[(rev2_key, parent_key)])
1417
1386
 
1418
1387
    def test__get_parent_ann_uses_matching_blocks(self):
1419
1388
        ann = self.make_annotator()
1420
 
        rev_key = (b'rev-id',)
1421
 
        parent_key = (b'parent-id',)
1422
 
        parent_ann = [(parent_key,)] * 3
 
1389
        rev_key = ('rev-id',)
 
1390
        parent_key = ('parent-id',)
 
1391
        parent_ann = [(parent_key,)]*3
1423
1392
        block_key = (rev_key, parent_key)
1424
1393
        ann._annotations_cache[parent_key] = parent_ann
1425
1394
        ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
1426
1395
        # We should not try to access any parent_lines content, because we know
1427
1396
        # we already have the matching blocks
1428
1397
        par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
1429
 
                                                                  [b'1\n', b'2\n', b'3\n'], parent_key)
 
1398
                                        ['1\n', '2\n', '3\n'], parent_key)
1430
1399
        self.assertEqual(parent_ann, par_ann)
1431
1400
        self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
1432
1401
        self.assertEqual({}, ann._matching_blocks)
1433
1402
 
1434
1403
    def test__process_pending(self):
1435
1404
        ann = self.make_annotator()
1436
 
        rev_key = (b'rev-id',)
1437
 
        p1_key = (b'p1-id',)
1438
 
        p2_key = (b'p2-id',)
1439
 
        record = [b'0,1,1\n', b'new-line\n']
 
1405
        rev_key = ('rev-id',)
 
1406
        p1_key = ('p1-id',)
 
1407
        p2_key = ('p2-id',)
 
1408
        record = ['0,1,1\n', 'new-line\n']
1440
1409
        details = ('line-delta', False)
1441
 
        p1_record = [b'line1\n', b'line2\n']
 
1410
        p1_record = ['line1\n', 'line2\n']
1442
1411
        ann._num_compression_children[p1_key] = 1
1443
 
        res = ann._expand_record(rev_key, (p1_key, p2_key), p1_key,
 
1412
        res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
1444
1413
                                 record, details)
1445
1414
        self.assertEqual(None, res)
1446
1415
        # self.assertTrue(p1_key in ann._pending_deltas)
1449
1418
        res = ann._expand_record(p1_key, (), None, p1_record,
1450
1419
                                 ('fulltext', False))
1451
1420
        self.assertEqual(p1_record, res)
1452
 
        ann._annotations_cache[p1_key] = [(p1_key,)] * 2
 
1421
        ann._annotations_cache[p1_key] = [(p1_key,)]*2
1453
1422
        res = ann._process_pending(p1_key)
1454
1423
        self.assertEqual([], res)
1455
1424
        self.assertFalse(p1_key in ann._pending_deltas)
1466
1435
 
1467
1436
    def test_record_delta_removes_basis(self):
1468
1437
        ann = self.make_annotator()
1469
 
        ann._expand_record((b'parent-id',), (), None,
1470
 
                           [b'line1\n', b'line2\n'], ('fulltext', False))
1471
 
        ann._num_compression_children[b'parent-id'] = 2
 
1438
        ann._expand_record(('parent-id',), (), None,
 
1439
                           ['line1\n', 'line2\n'], ('fulltext', False))
 
1440
        ann._num_compression_children['parent-id'] = 2
1472
1441
 
1473
1442
    def test_annotate_special_text(self):
1474
1443
        ann = self.make_annotator()
1475
1444
        vf = ann._vf
1476
 
        rev1_key = (b'rev-1',)
1477
 
        rev2_key = (b'rev-2',)
1478
 
        rev3_key = (b'rev-3',)
1479
 
        spec_key = (b'special:',)
1480
 
        vf.add_lines(rev1_key, [], [b'initial content\n'])
1481
 
        vf.add_lines(rev2_key, [rev1_key], [b'initial content\n',
1482
 
                                            b'common content\n',
1483
 
                                            b'content in 2\n'])
1484
 
        vf.add_lines(rev3_key, [rev1_key], [b'initial content\n',
1485
 
                                            b'common content\n',
1486
 
                                            b'content in 3\n'])
1487
 
        spec_text = (b'initial content\n'
1488
 
                     b'common content\n'
1489
 
                     b'content in 2\n'
1490
 
                     b'content in 3\n')
 
1445
        rev1_key = ('rev-1',)
 
1446
        rev2_key = ('rev-2',)
 
1447
        rev3_key = ('rev-3',)
 
1448
        spec_key = ('special:',)
 
1449
        vf.add_lines(rev1_key, [], ['initial content\n'])
 
1450
        vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
 
1451
                                            'common content\n',
 
1452
                                            'content in 2\n'])
 
1453
        vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
 
1454
                                            'common content\n',
 
1455
                                            'content in 3\n'])
 
1456
        spec_text = ('initial content\n'
 
1457
                     'common content\n'
 
1458
                     'content in 2\n'
 
1459
                     'content in 3\n')
1491
1460
        ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
1492
1461
        anns, lines = ann.annotate(spec_key)
1493
1462
        self.assertEqual([(rev1_key,),
1494
1463
                          (rev2_key, rev3_key),
1495
1464
                          (rev2_key,),
1496
1465
                          (rev3_key,),
1497
 
                          ], anns)
1498
 
        self.assertEqualDiff(spec_text, b''.join(lines))
 
1466
                         ], anns)
 
1467
        self.assertEqualDiff(spec_text, ''.join(lines))
1499
1468
 
1500
1469
 
1501
1470
class KnitTests(TestCaseWithTransport):
1517
1486
            raise TestNotApplicable(
1518
1487
                "cannot get delta-caused sha failures without deltas.")
1519
1488
        # create a basis
1520
 
        basis = (b'basis',)
1521
 
        broken = (b'broken',)
1522
 
        source.add_lines(basis, (), [b'foo\n'])
1523
 
        source.add_lines(broken, (basis,), [b'foo\n', b'bar\n'])
 
1489
        basis = ('basis',)
 
1490
        broken = ('broken',)
 
1491
        source.add_lines(basis, (), ['foo\n'])
 
1492
        source.add_lines(broken, (basis,), ['foo\n', 'bar\n'])
1524
1493
        # Seed target with a bad basis text
1525
 
        target.add_lines(basis, (), [b'gam\n'])
 
1494
        target.add_lines(basis, (), ['gam\n'])
1526
1495
        target.insert_record_stream(
1527
1496
            source.get_record_stream([broken], 'unordered', False))
1528
 
        err = self.assertRaises(KnitCorrupt,
1529
 
                                next(target.get_record_stream([broken], 'unordered', True
1530
 
                                                              )).get_bytes_as, 'chunked')
1531
 
        self.assertEqual([b'gam\n', b'bar\n'], err.content)
 
1497
        err = self.assertRaises(errors.KnitCorrupt,
 
1498
            target.get_record_stream([broken], 'unordered', True
 
1499
            ).next().get_bytes_as, 'chunked')
 
1500
        self.assertEqual(['gam\n', 'bar\n'], err.content)
1532
1501
        # Test for formatting with live data
1533
1502
        self.assertStartsWith(str(err), "Knit ")
1534
1503
 
1539
1508
        """Adding versions to the index should update the lookup dict"""
1540
1509
        knit = self.make_test_knit()
1541
1510
        idx = knit._index
1542
 
        idx.add_records([((b'a-1',), [b'fulltext'], ((b'a-1',), 0, 0), [])])
 
1511
        idx.add_records([(('a-1',), ['fulltext'], (('a-1',), 0, 0), [])])
1543
1512
        self.check_file_contents('test.kndx',
1544
 
                                 b'# bzr knit index 8\n'
1545
 
                                 b'\n'
1546
 
                                 b'a-1 fulltext 0 0  :'
1547
 
                                 )
 
1513
            '# bzr knit index 8\n'
 
1514
            '\n'
 
1515
            'a-1 fulltext 0 0  :'
 
1516
            )
1548
1517
        idx.add_records([
1549
 
            ((b'a-2',), [b'fulltext'], ((b'a-2',), 0, 0), [(b'a-1',)]),
1550
 
            ((b'a-3',), [b'fulltext'], ((b'a-3',), 0, 0), [(b'a-2',)]),
 
1518
            (('a-2',), ['fulltext'], (('a-2',), 0, 0), [('a-1',)]),
 
1519
            (('a-3',), ['fulltext'], (('a-3',), 0, 0), [('a-2',)]),
1551
1520
            ])
1552
1521
        self.check_file_contents('test.kndx',
1553
 
                                 b'# bzr knit index 8\n'
1554
 
                                 b'\n'
1555
 
                                 b'a-1 fulltext 0 0  :\n'
1556
 
                                 b'a-2 fulltext 0 0 0 :\n'
1557
 
                                 b'a-3 fulltext 0 0 1 :'
1558
 
                                 )
1559
 
        self.assertEqual({(b'a-3',), (b'a-1',), (b'a-2',)}, idx.keys())
 
1522
            '# bzr knit index 8\n'
 
1523
            '\n'
 
1524
            'a-1 fulltext 0 0  :\n'
 
1525
            'a-2 fulltext 0 0 0 :\n'
 
1526
            'a-3 fulltext 0 0 1 :'
 
1527
            )
 
1528
        self.assertEqual(set([('a-3',), ('a-1',), ('a-2',)]), idx.keys())
1560
1529
        self.assertEqual({
1561
 
            (b'a-1',): (((b'a-1',), 0, 0), None, (), ('fulltext', False)),
1562
 
            (b'a-2',): (((b'a-2',), 0, 0), None, ((b'a-1',),), ('fulltext', False)),
1563
 
            (b'a-3',): (((b'a-3',), 0, 0), None, ((b'a-2',),), ('fulltext', False)),
 
1530
            ('a-1',): ((('a-1',), 0, 0), None, (), ('fulltext', False)),
 
1531
            ('a-2',): ((('a-2',), 0, 0), None, (('a-1',),), ('fulltext', False)),
 
1532
            ('a-3',): ((('a-3',), 0, 0), None, (('a-2',),), ('fulltext', False)),
1564
1533
            }, idx.get_build_details(idx.keys()))
1565
 
        self.assertEqual({(b'a-1',): (),
1566
 
                          (b'a-2',): ((b'a-1',),),
1567
 
                          (b'a-3',): ((b'a-2',),), },
1568
 
                         idx.get_parent_map(idx.keys()))
 
1534
        self.assertEqual({('a-1',):(),
 
1535
            ('a-2',):(('a-1',),),
 
1536
            ('a-3',):(('a-2',),),},
 
1537
            idx.get_parent_map(idx.keys()))
1569
1538
 
1570
1539
    def test_add_versions_fails_clean(self):
1571
1540
        """If add_versions fails in the middle, it restores a pristine state.
1581
1550
 
1582
1551
        knit = self.make_test_knit()
1583
1552
        idx = knit._index
1584
 
        idx.add_records([((b'a-1',), [b'fulltext'], ((b'a-1',), 0, 0), [])])
 
1553
        idx.add_records([(('a-1',), ['fulltext'], (('a-1',), 0, 0), [])])
1585
1554
 
1586
1555
        class StopEarly(Exception):
1587
1556
            pass
1588
1557
 
1589
1558
        def generate_failure():
1590
1559
            """Add some entries and then raise an exception"""
1591
 
            yield ((b'a-2',), [b'fulltext'], (None, 0, 0), (b'a-1',))
1592
 
            yield ((b'a-3',), [b'fulltext'], (None, 0, 0), (b'a-2',))
 
1560
            yield (('a-2',), ['fulltext'], (None, 0, 0), ('a-1',))
 
1561
            yield (('a-3',), ['fulltext'], (None, 0, 0), ('a-2',))
1593
1562
            raise StopEarly()
1594
1563
 
1595
1564
        # Assert the pre-condition
1596
1565
        def assertA1Only():
1597
 
            self.assertEqual({(b'a-1',)}, set(idx.keys()))
 
1566
            self.assertEqual(set([('a-1',)]), set(idx.keys()))
1598
1567
            self.assertEqual(
1599
 
                {(b'a-1',): (((b'a-1',), 0, 0), None, (), ('fulltext', False))},
1600
 
                idx.get_build_details([(b'a-1',)]))
1601
 
            self.assertEqual({(b'a-1',): ()}, idx.get_parent_map(idx.keys()))
 
1568
                {('a-1',): ((('a-1',), 0, 0), None, (), ('fulltext', False))},
 
1569
                idx.get_build_details([('a-1',)]))
 
1570
            self.assertEqual({('a-1',):()}, idx.get_parent_map(idx.keys()))
1602
1571
 
1603
1572
        assertA1Only()
1604
1573
        self.assertRaises(StopEarly, idx.add_records, generate_failure())
1610
1579
        # could leave an empty .kndx file, which bzr would later claim was a
1611
1580
        # corrupted file since the header was not present. In reality, the file
1612
1581
        # just wasn't created, so it should be ignored.
1613
 
        t = transport.get_transport_from_path('.')
1614
 
        t.put_bytes('test.kndx', b'')
 
1582
        t = get_transport('.')
 
1583
        t.put_bytes('test.kndx', '')
1615
1584
 
1616
1585
        knit = self.make_test_knit()
1617
1586
 
1618
1587
    def test_knit_index_checks_header(self):
1619
 
        t = transport.get_transport_from_path('.')
1620
 
        t.put_bytes('test.kndx', b'# not really a knit header\n\n')
 
1588
        t = get_transport('.')
 
1589
        t.put_bytes('test.kndx', '# not really a knit header\n\n')
1621
1590
        k = self.make_test_knit()
1622
1591
        self.assertRaises(KnitHeaderError, k.keys)
1623
1592
 
1644
1613
        if deltas:
1645
1614
            # delta compression inn the index
1646
1615
            index1 = self.make_g_index('1', 2, [
1647
 
                ((b'tip', ), b'N0 100', ([(b'parent', )], [], )),
1648
 
                ((b'tail', ), b'', ([], []))])
 
1616
                (('tip', ), 'N0 100', ([('parent', )], [], )),
 
1617
                (('tail', ), '', ([], []))])
1649
1618
            index2 = self.make_g_index('2', 2, [
1650
 
                ((b'parent', ), b' 100 78',
1651
 
                 ([(b'tail', ), (b'ghost', )], [(b'tail', )])),
1652
 
                ((b'separate', ), b'', ([], []))])
 
1619
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], [('tail', )])),
 
1620
                (('separate', ), '', ([], []))])
1653
1621
        else:
1654
1622
            # just blob location and graph in the index.
1655
1623
            index1 = self.make_g_index('1', 1, [
1656
 
                ((b'tip', ), b'N0 100', ([(b'parent', )], )),
1657
 
                ((b'tail', ), b'', ([], ))])
 
1624
                (('tip', ), 'N0 100', ([('parent', )], )),
 
1625
                (('tail', ), '', ([], ))])
1658
1626
            index2 = self.make_g_index('2', 1, [
1659
 
                ((b'parent', ), b' 100 78', ([(b'tail', ), (b'ghost', )], )),
1660
 
                ((b'separate', ), b'', ([], ))])
 
1627
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], )),
 
1628
                (('separate', ), '', ([], ))])
1661
1629
        combined_index = CombinedGraphIndex([index1, index2])
1662
1630
        if catch_adds:
1663
1631
            self.combined_index = combined_index
1665
1633
            add_callback = self.catch_add
1666
1634
        else:
1667
1635
            add_callback = None
1668
 
        return _KnitGraphIndex(combined_index, lambda: True, deltas=deltas,
1669
 
                               add_callback=add_callback)
 
1636
        return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
 
1637
            add_callback=add_callback)
1670
1638
 
1671
1639
    def test_keys(self):
1672
1640
        index = self.two_graph_index()
1673
 
        self.assertEqual({(b'tail',), (b'tip',), (b'parent',), (b'separate',)},
1674
 
                         set(index.keys()))
 
1641
        self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
 
1642
            set(index.keys()))
1675
1643
 
1676
1644
    def test_get_position(self):
1677
1645
        index = self.two_graph_index()
1678
 
        self.assertEqual(
1679
 
            (index._graph_index._indices[0], 0, 100), index.get_position((b'tip',)))
1680
 
        self.assertEqual(
1681
 
            (index._graph_index._indices[1], 100, 78), index.get_position((b'parent',)))
 
1646
        self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position(('tip',)))
 
1647
        self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position(('parent',)))
1682
1648
 
1683
1649
    def test_get_method_deltas(self):
1684
1650
        index = self.two_graph_index(deltas=True)
1685
 
        self.assertEqual('fulltext', index.get_method((b'tip',)))
1686
 
        self.assertEqual('line-delta', index.get_method((b'parent',)))
 
1651
        self.assertEqual('fulltext', index.get_method(('tip',)))
 
1652
        self.assertEqual('line-delta', index.get_method(('parent',)))
1687
1653
 
1688
1654
    def test_get_method_no_deltas(self):
1689
1655
        # check that the parent-history lookup is ignored with deltas=False.
1690
1656
        index = self.two_graph_index(deltas=False)
1691
 
        self.assertEqual('fulltext', index.get_method((b'tip',)))
1692
 
        self.assertEqual('fulltext', index.get_method((b'parent',)))
 
1657
        self.assertEqual('fulltext', index.get_method(('tip',)))
 
1658
        self.assertEqual('fulltext', index.get_method(('parent',)))
1693
1659
 
1694
1660
    def test_get_options_deltas(self):
1695
1661
        index = self.two_graph_index(deltas=True)
1696
 
        self.assertEqual([b'fulltext', b'no-eol'],
1697
 
                         index.get_options((b'tip',)))
1698
 
        self.assertEqual([b'line-delta'], index.get_options((b'parent',)))
 
1662
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
 
1663
        self.assertEqual(['line-delta'], index.get_options(('parent',)))
1699
1664
 
1700
1665
    def test_get_options_no_deltas(self):
1701
1666
        # check that the parent-history lookup is ignored with deltas=False.
1702
1667
        index = self.two_graph_index(deltas=False)
1703
 
        self.assertEqual([b'fulltext', b'no-eol'],
1704
 
                         index.get_options((b'tip',)))
1705
 
        self.assertEqual([b'fulltext'], index.get_options((b'parent',)))
 
1668
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
 
1669
        self.assertEqual(['fulltext'], index.get_options(('parent',)))
1706
1670
 
1707
1671
    def test_get_parent_map(self):
1708
1672
        index = self.two_graph_index()
1709
 
        self.assertEqual({(b'parent',): ((b'tail',), (b'ghost',))},
1710
 
                         index.get_parent_map([(b'parent',), (b'ghost',)]))
 
1673
        self.assertEqual({('parent',):(('tail',), ('ghost',))},
 
1674
            index.get_parent_map([('parent',), ('ghost',)]))
1711
1675
 
1712
1676
    def catch_add(self, entries):
1713
1677
        self.caught_entries.append(entries)
1715
1679
    def test_add_no_callback_errors(self):
1716
1680
        index = self.two_graph_index()
1717
1681
        self.assertRaises(errors.ReadOnlyError, index.add_records,
1718
 
                          [((b'new',), b'fulltext,no-eol', (None, 50, 60), [b'separate'])])
 
1682
            [(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])
1719
1683
 
1720
1684
    def test_add_version_smoke(self):
1721
1685
        index = self.two_graph_index(catch_adds=True)
1722
 
        index.add_records([((b'new',), b'fulltext,no-eol', (None, 50, 60),
1723
 
                            [(b'separate',)])])
1724
 
        self.assertEqual([[((b'new', ), b'N50 60', (((b'separate',),),))]],
1725
 
                         self.caught_entries)
 
1686
        index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
 
1687
            [('separate',)])])
 
1688
        self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
 
1689
            self.caught_entries)
1726
1690
 
1727
1691
    def test_add_version_delta_not_delta_index(self):
1728
1692
        index = self.two_graph_index(catch_adds=True)
1729
 
        self.assertRaises(KnitCorrupt, index.add_records,
1730
 
                          [((b'new',), b'no-eol,line-delta', (None, 0, 100), [(b'parent',)])])
 
1693
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1694
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
1731
1695
        self.assertEqual([], self.caught_entries)
1732
1696
 
1733
1697
    def test_add_version_same_dup(self):
1734
1698
        index = self.two_graph_index(catch_adds=True)
1735
1699
        # options can be spelt two different ways
1736
 
        index.add_records(
1737
 
            [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [(b'parent',)])])
1738
 
        index.add_records(
1739
 
            [((b'tip',), b'no-eol,fulltext', (None, 0, 100), [(b'parent',)])])
 
1700
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
 
1701
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
1740
1702
        # position/length are ignored (because each pack could have fulltext or
1741
1703
        # delta, and be at a different position.
1742
 
        index.add_records([((b'tip',), b'fulltext,no-eol', (None, 50, 100),
1743
 
                            [(b'parent',)])])
1744
 
        index.add_records([((b'tip',), b'fulltext,no-eol', (None, 0, 1000),
1745
 
                            [(b'parent',)])])
 
1704
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
 
1705
            [('parent',)])])
 
1706
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
 
1707
            [('parent',)])])
1746
1708
        # but neither should have added data:
1747
1709
        self.assertEqual([[], [], [], []], self.caught_entries)
1748
1710
 
1749
1711
    def test_add_version_different_dup(self):
1750
1712
        index = self.two_graph_index(deltas=True, catch_adds=True)
1751
1713
        # change options
1752
 
        self.assertRaises(KnitCorrupt, index.add_records,
1753
 
                          [((b'tip',), b'line-delta', (None, 0, 100), [(b'parent',)])])
1754
 
        self.assertRaises(KnitCorrupt, index.add_records,
1755
 
                          [((b'tip',), b'fulltext', (None, 0, 100), [(b'parent',)])])
 
1714
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1715
            [(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
 
1716
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1717
            [(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
1756
1718
        # parents
1757
 
        self.assertRaises(KnitCorrupt, index.add_records,
1758
 
                          [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [])])
 
1719
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1720
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
1759
1721
        self.assertEqual([], self.caught_entries)
1760
1722
 
1761
1723
    def test_add_versions_nodeltas(self):
1762
1724
        index = self.two_graph_index(catch_adds=True)
1763
1725
        index.add_records([
1764
 
            ((b'new',), b'fulltext,no-eol', (None, 50, 60), [(b'separate',)]),
1765
 
            ((b'new2',), b'fulltext', (None, 0, 6), [(b'new',)]),
1766
 
            ])
1767
 
        self.assertEqual([((b'new', ), b'N50 60', (((b'separate',),),)),
1768
 
                          ((b'new2', ), b' 0 6', (((b'new',),),))],
1769
 
                         sorted(self.caught_entries[0]))
 
1726
                (('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
 
1727
                (('new2',), 'fulltext', (None, 0, 6), [('new',)]),
 
1728
                ])
 
1729
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
 
1730
            (('new2', ), ' 0 6', ((('new',),),))],
 
1731
            sorted(self.caught_entries[0]))
1770
1732
        self.assertEqual(1, len(self.caught_entries))
1771
1733
 
1772
1734
    def test_add_versions_deltas(self):
1773
1735
        index = self.two_graph_index(deltas=True, catch_adds=True)
1774
1736
        index.add_records([
1775
 
            ((b'new',), b'fulltext,no-eol', (None, 50, 60), [(b'separate',)]),
1776
 
            ((b'new2',), b'line-delta', (None, 0, 6), [(b'new',)]),
1777
 
            ])
1778
 
        self.assertEqual([((b'new', ), b'N50 60', (((b'separate',),), ())),
1779
 
                          ((b'new2', ), b' 0 6', (((b'new',),), ((b'new',),), ))],
1780
 
                         sorted(self.caught_entries[0]))
 
1737
                (('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
 
1738
                (('new2',), 'line-delta', (None, 0, 6), [('new',)]),
 
1739
                ])
 
1740
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),), ())),
 
1741
            (('new2', ), ' 0 6', ((('new',),), (('new',),), ))],
 
1742
            sorted(self.caught_entries[0]))
1781
1743
        self.assertEqual(1, len(self.caught_entries))
1782
1744
 
1783
1745
    def test_add_versions_delta_not_delta_index(self):
1784
1746
        index = self.two_graph_index(catch_adds=True)
1785
 
        self.assertRaises(KnitCorrupt, index.add_records,
1786
 
                          [((b'new',), b'no-eol,line-delta', (None, 0, 100), [(b'parent',)])])
 
1747
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1748
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
1787
1749
        self.assertEqual([], self.caught_entries)
1788
1750
 
1789
1751
    def test_add_versions_random_id_accepted(self):
1793
1755
    def test_add_versions_same_dup(self):
1794
1756
        index = self.two_graph_index(catch_adds=True)
1795
1757
        # options can be spelt two different ways
1796
 
        index.add_records([((b'tip',), b'fulltext,no-eol', (None, 0, 100),
1797
 
                            [(b'parent',)])])
1798
 
        index.add_records([((b'tip',), b'no-eol,fulltext', (None, 0, 100),
1799
 
                            [(b'parent',)])])
 
1758
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
 
1759
            [('parent',)])])
 
1760
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
 
1761
            [('parent',)])])
1800
1762
        # position/length are ignored (because each pack could have fulltext or
1801
1763
        # delta, and be at a different position.
1802
 
        index.add_records([((b'tip',), b'fulltext,no-eol', (None, 50, 100),
1803
 
                            [(b'parent',)])])
1804
 
        index.add_records([((b'tip',), b'fulltext,no-eol', (None, 0, 1000),
1805
 
                            [(b'parent',)])])
 
1764
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
 
1765
            [('parent',)])])
 
1766
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
 
1767
            [('parent',)])])
1806
1768
        # but neither should have added data.
1807
1769
        self.assertEqual([[], [], [], []], self.caught_entries)
1808
1770
 
1809
1771
    def test_add_versions_different_dup(self):
1810
1772
        index = self.two_graph_index(deltas=True, catch_adds=True)
1811
1773
        # change options
1812
 
        self.assertRaises(KnitCorrupt, index.add_records,
1813
 
                          [((b'tip',), b'line-delta', (None, 0, 100), [(b'parent',)])])
1814
 
        self.assertRaises(KnitCorrupt, index.add_records,
1815
 
                          [((b'tip',), b'fulltext', (None, 0, 100), [(b'parent',)])])
 
1774
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1775
            [(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
 
1776
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1777
            [(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
1816
1778
        # parents
1817
 
        self.assertRaises(KnitCorrupt, index.add_records,
1818
 
                          [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [])])
 
1779
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1780
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
1819
1781
        # change options in the second record
1820
 
        self.assertRaises(KnitCorrupt, index.add_records,
1821
 
                          [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [(b'parent',)]),
1822
 
                           ((b'tip',), b'line-delta', (None, 0, 100), [(b'parent',)])])
 
1782
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1783
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
 
1784
             (('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1823
1785
        self.assertEqual([], self.caught_entries)
1824
1786
 
1825
1787
    def make_g_index_missing_compression_parent(self):
1826
1788
        graph_index = self.make_g_index('missing_comp', 2,
1827
 
                                        [((b'tip', ), b' 100 78',
1828
 
                                          ([(b'missing-parent', ), (b'ghost', )], [(b'missing-parent', )]))])
 
1789
            [(('tip', ), ' 100 78',
 
1790
              ([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
1829
1791
        return graph_index
1830
1792
 
1831
1793
    def make_g_index_missing_parent(self):
1832
1794
        graph_index = self.make_g_index('missing_parent', 2,
1833
 
                                        [((b'parent', ), b' 100 78', ([], [])),
1834
 
                                         ((b'tip', ), b' 100 78',
1835
 
                                            ([(b'parent', ), (b'missing-parent', )], [(b'parent', )])),
1836
 
                                         ])
 
1795
            [(('parent', ), ' 100 78', ([], [])),
 
1796
             (('tip', ), ' 100 78',
 
1797
              ([('parent', ), ('missing-parent', )], [('parent', )])),
 
1798
              ])
1837
1799
        return graph_index
1838
1800
 
1839
1801
    def make_g_index_no_external_refs(self):
1840
1802
        graph_index = self.make_g_index('no_external_refs', 2,
1841
 
                                        [((b'rev', ), b' 100 78',
1842
 
                                          ([(b'parent', ), (b'ghost', )], []))])
 
1803
            [(('rev', ), ' 100 78',
 
1804
              ([('parent', ), ('ghost', )], []))])
1843
1805
        return graph_index
1844
1806
 
1845
1807
    def test_add_good_unvalidated_index(self):
1858
1820
        # examined, otherwise 'ghost' would also be reported as a missing
1859
1821
        # parent.
1860
1822
        self.assertEqual(
1861
 
            frozenset([(b'missing-parent',)]),
 
1823
            frozenset([('missing-parent',)]),
1862
1824
            index.get_missing_compression_parents())
1863
1825
 
1864
1826
    def test_add_missing_noncompression_parent_unvalidated_index(self):
1865
1827
        unvalidated = self.make_g_index_missing_parent()
1866
1828
        combined = CombinedGraphIndex([unvalidated])
1867
1829
        index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1868
 
                                track_external_parent_refs=True)
 
1830
            track_external_parent_refs=True)
1869
1831
        index.scan_unvalidated_index(unvalidated)
1870
1832
        self.assertEqual(
1871
 
            frozenset([(b'missing-parent',)]), index.get_missing_parents())
 
1833
            frozenset([('missing-parent',)]), index.get_missing_parents())
1872
1834
 
1873
1835
    def test_track_external_parent_refs(self):
1874
1836
        g_index = self.make_g_index('empty', 2, [])
1875
1837
        combined = CombinedGraphIndex([g_index])
1876
1838
        index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1877
 
                                add_callback=self.catch_add, track_external_parent_refs=True)
 
1839
            add_callback=self.catch_add, track_external_parent_refs=True)
1878
1840
        self.caught_entries = []
1879
1841
        index.add_records([
1880
 
            ((b'new-key',), b'fulltext,no-eol', (None, 50, 60),
1881
 
             [(b'parent-1',), (b'parent-2',)])])
 
1842
            (('new-key',), 'fulltext,no-eol', (None, 50, 60),
 
1843
             [('parent-1',), ('parent-2',)])])
1882
1844
        self.assertEqual(
1883
 
            frozenset([(b'parent-1',), (b'parent-2',)]),
 
1845
            frozenset([('parent-1',), ('parent-2',)]),
1884
1846
            index.get_missing_parents())
1885
1847
 
1886
1848
    def test_add_unvalidated_index_with_present_external_references(self):
1894
1856
        self.assertEqual(frozenset(), index.get_missing_compression_parents())
1895
1857
 
1896
1858
    def make_new_missing_parent_g_index(self, name):
1897
 
        missing_parent = name.encode('ascii') + b'-missing-parent'
 
1859
        missing_parent = name + '-missing-parent'
1898
1860
        graph_index = self.make_g_index(name, 2,
1899
 
                                        [((name.encode('ascii') + b'tip', ), b' 100 78',
1900
 
                                          ([(missing_parent, ), (b'ghost', )], [(missing_parent, )]))])
 
1861
            [((name + 'tip', ), ' 100 78',
 
1862
              ([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
1901
1863
        return graph_index
1902
1864
 
1903
1865
    def test_add_mulitiple_unvalidated_indices_with_missing_parents(self):
1908
1870
        index.scan_unvalidated_index(g_index_1)
1909
1871
        index.scan_unvalidated_index(g_index_2)
1910
1872
        self.assertEqual(
1911
 
            frozenset([(b'one-missing-parent',), (b'two-missing-parent',)]),
 
1873
            frozenset([('one-missing-parent',), ('two-missing-parent',)]),
1912
1874
            index.get_missing_compression_parents())
1913
1875
 
1914
1876
    def test_add_mulitiple_unvalidated_indices_with_mutual_dependencies(self):
1915
1877
        graph_index_a = self.make_g_index('one', 2,
1916
 
                                          [((b'parent-one', ), b' 100 78', ([(b'non-compression-parent',)], [])),
1917
 
                                           ((b'child-of-two', ), b' 100 78',
1918
 
                                              ([(b'parent-two',)], [(b'parent-two',)]))])
 
1878
            [(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
 
1879
             (('child-of-two', ), ' 100 78',
 
1880
              ([('parent-two',)], [('parent-two',)]))])
1919
1881
        graph_index_b = self.make_g_index('two', 2,
1920
 
                                          [((b'parent-two', ), b' 100 78', ([(b'non-compression-parent',)], [])),
1921
 
                                           ((b'child-of-one', ), b' 100 78',
1922
 
                                              ([(b'parent-one',)], [(b'parent-one',)]))])
 
1882
            [(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
 
1883
             (('child-of-one', ), ' 100 78',
 
1884
              ([('parent-one',)], [('parent-one',)]))])
1923
1885
        combined = CombinedGraphIndex([graph_index_a, graph_index_b])
1924
1886
        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1925
1887
        index.scan_unvalidated_index(graph_index_a)
1946
1908
        index = _KnitGraphIndex(combined, lambda: True, parents=False)
1947
1909
        index.scan_unvalidated_index(unvalidated)
1948
1910
        self.assertEqual(frozenset(),
1949
 
                         index.get_missing_compression_parents())
 
1911
            index.get_missing_compression_parents())
1950
1912
 
1951
1913
    def test_parents_deltas_incompatible(self):
1952
1914
        index = CombinedGraphIndex([])
1953
 
        self.assertRaises(knit.KnitError, _KnitGraphIndex, lambda: True,
1954
 
                          index, deltas=True, parents=False)
 
1915
        self.assertRaises(errors.KnitError, _KnitGraphIndex, lambda:True,
 
1916
            index, deltas=True, parents=False)
1955
1917
 
1956
1918
    def two_graph_index(self, catch_adds=False):
1957
1919
        """Build a two-graph index.
1961
1923
        """
1962
1924
        # put several versions in the index.
1963
1925
        index1 = self.make_g_index('1', 0, [
1964
 
            ((b'tip', ), b'N0 100'),
1965
 
            ((b'tail', ), b'')])
 
1926
            (('tip', ), 'N0 100'),
 
1927
            (('tail', ), '')])
1966
1928
        index2 = self.make_g_index('2', 0, [
1967
 
            ((b'parent', ), b' 100 78'),
1968
 
            ((b'separate', ), b'')])
 
1929
            (('parent', ), ' 100 78'),
 
1930
            (('separate', ), '')])
1969
1931
        combined_index = CombinedGraphIndex([index1, index2])
1970
1932
        if catch_adds:
1971
1933
            self.combined_index = combined_index
1973
1935
            add_callback = self.catch_add
1974
1936
        else:
1975
1937
            add_callback = None
1976
 
        return _KnitGraphIndex(combined_index, lambda: True, parents=False,
1977
 
                               add_callback=add_callback)
 
1938
        return _KnitGraphIndex(combined_index, lambda:True, parents=False,
 
1939
            add_callback=add_callback)
1978
1940
 
1979
1941
    def test_keys(self):
1980
1942
        index = self.two_graph_index()
1981
 
        self.assertEqual({(b'tail',), (b'tip',), (b'parent',), (b'separate',)},
1982
 
                         set(index.keys()))
 
1943
        self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
 
1944
            set(index.keys()))
1983
1945
 
1984
1946
    def test_get_position(self):
1985
1947
        index = self.two_graph_index()
1986
1948
        self.assertEqual((index._graph_index._indices[0], 0, 100),
1987
 
                         index.get_position((b'tip',)))
 
1949
            index.get_position(('tip',)))
1988
1950
        self.assertEqual((index._graph_index._indices[1], 100, 78),
1989
 
                         index.get_position((b'parent',)))
 
1951
            index.get_position(('parent',)))
1990
1952
 
1991
1953
    def test_get_method(self):
1992
1954
        index = self.two_graph_index()
1993
 
        self.assertEqual('fulltext', index.get_method((b'tip',)))
1994
 
        self.assertEqual([b'fulltext'], index.get_options((b'parent',)))
 
1955
        self.assertEqual('fulltext', index.get_method(('tip',)))
 
1956
        self.assertEqual(['fulltext'], index.get_options(('parent',)))
1995
1957
 
1996
1958
    def test_get_options(self):
1997
1959
        index = self.two_graph_index()
1998
 
        self.assertEqual([b'fulltext', b'no-eol'],
1999
 
                         index.get_options((b'tip',)))
2000
 
        self.assertEqual([b'fulltext'], index.get_options((b'parent',)))
 
1960
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
 
1961
        self.assertEqual(['fulltext'], index.get_options(('parent',)))
2001
1962
 
2002
1963
    def test_get_parent_map(self):
2003
1964
        index = self.two_graph_index()
2004
 
        self.assertEqual({(b'parent',): None},
2005
 
                         index.get_parent_map([(b'parent',), (b'ghost',)]))
 
1965
        self.assertEqual({('parent',):None},
 
1966
            index.get_parent_map([('parent',), ('ghost',)]))
2006
1967
 
2007
1968
    def catch_add(self, entries):
2008
1969
        self.caught_entries.append(entries)
2010
1971
    def test_add_no_callback_errors(self):
2011
1972
        index = self.two_graph_index()
2012
1973
        self.assertRaises(errors.ReadOnlyError, index.add_records,
2013
 
                          [((b'new',), b'fulltext,no-eol', (None, 50, 60), [(b'separate',)])])
 
1974
            [(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)])])
2014
1975
 
2015
1976
    def test_add_version_smoke(self):
2016
1977
        index = self.two_graph_index(catch_adds=True)
2017
 
        index.add_records(
2018
 
            [((b'new',), b'fulltext,no-eol', (None, 50, 60), [])])
2019
 
        self.assertEqual([[((b'new', ), b'N50 60')]],
2020
 
                         self.caught_entries)
 
1978
        index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60), [])])
 
1979
        self.assertEqual([[(('new', ), 'N50 60')]],
 
1980
            self.caught_entries)
2021
1981
 
2022
1982
    def test_add_version_delta_not_delta_index(self):
2023
1983
        index = self.two_graph_index(catch_adds=True)
2024
 
        self.assertRaises(KnitCorrupt, index.add_records,
2025
 
                          [((b'new',), b'no-eol,line-delta', (None, 0, 100), [])])
 
1984
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
1985
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [])])
2026
1986
        self.assertEqual([], self.caught_entries)
2027
1987
 
2028
1988
    def test_add_version_same_dup(self):
2029
1989
        index = self.two_graph_index(catch_adds=True)
2030
1990
        # options can be spelt two different ways
2031
 
        index.add_records(
2032
 
            [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [])])
2033
 
        index.add_records(
2034
 
            [((b'tip',), b'no-eol,fulltext', (None, 0, 100), [])])
 
1991
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
 
1992
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2035
1993
        # position/length are ignored (because each pack could have fulltext or
2036
1994
        # delta, and be at a different position.
2037
 
        index.add_records(
2038
 
            [((b'tip',), b'fulltext,no-eol', (None, 50, 100), [])])
2039
 
        index.add_records(
2040
 
            [((b'tip',), b'fulltext,no-eol', (None, 0, 1000), [])])
 
1995
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
 
1996
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2041
1997
        # but neither should have added data.
2042
1998
        self.assertEqual([[], [], [], []], self.caught_entries)
2043
1999
 
2044
2000
    def test_add_version_different_dup(self):
2045
2001
        index = self.two_graph_index(catch_adds=True)
2046
2002
        # change options
2047
 
        self.assertRaises(KnitCorrupt, index.add_records,
2048
 
                          [((b'tip',), b'no-eol,line-delta', (None, 0, 100), [])])
2049
 
        self.assertRaises(KnitCorrupt, index.add_records,
2050
 
                          [((b'tip',), b'line-delta,no-eol', (None, 0, 100), [])])
2051
 
        self.assertRaises(KnitCorrupt, index.add_records,
2052
 
                          [((b'tip',), b'fulltext', (None, 0, 100), [])])
 
2003
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2004
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
 
2005
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2006
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
 
2007
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2008
            [(('tip',), 'fulltext', (None, 0, 100), [])])
2053
2009
        # parents
2054
 
        self.assertRaises(KnitCorrupt, index.add_records,
2055
 
                          [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [(b'parent',)])])
 
2010
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2011
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2056
2012
        self.assertEqual([], self.caught_entries)
2057
2013
 
2058
2014
    def test_add_versions(self):
2059
2015
        index = self.two_graph_index(catch_adds=True)
2060
2016
        index.add_records([
2061
 
            ((b'new',), b'fulltext,no-eol', (None, 50, 60), []),
2062
 
            ((b'new2',), b'fulltext', (None, 0, 6), []),
2063
 
            ])
2064
 
        self.assertEqual([((b'new', ), b'N50 60'), ((b'new2', ), b' 0 6')],
2065
 
                         sorted(self.caught_entries[0]))
 
2017
                (('new',), 'fulltext,no-eol', (None, 50, 60), []),
 
2018
                (('new2',), 'fulltext', (None, 0, 6), []),
 
2019
                ])
 
2020
        self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
 
2021
            sorted(self.caught_entries[0]))
2066
2022
        self.assertEqual(1, len(self.caught_entries))
2067
2023
 
2068
2024
    def test_add_versions_delta_not_delta_index(self):
2069
2025
        index = self.two_graph_index(catch_adds=True)
2070
 
        self.assertRaises(KnitCorrupt, index.add_records,
2071
 
                          [((b'new',), b'no-eol,line-delta', (None, 0, 100), [(b'parent',)])])
 
2026
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2027
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2072
2028
        self.assertEqual([], self.caught_entries)
2073
2029
 
2074
2030
    def test_add_versions_parents_not_parents_index(self):
2075
2031
        index = self.two_graph_index(catch_adds=True)
2076
 
        self.assertRaises(KnitCorrupt, index.add_records,
2077
 
                          [((b'new',), b'no-eol,fulltext', (None, 0, 100), [(b'parent',)])])
 
2032
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2033
            [(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
2078
2034
        self.assertEqual([], self.caught_entries)
2079
2035
 
2080
2036
    def test_add_versions_random_id_accepted(self):
2084
2040
    def test_add_versions_same_dup(self):
2085
2041
        index = self.two_graph_index(catch_adds=True)
2086
2042
        # options can be spelt two different ways
2087
 
        index.add_records(
2088
 
            [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [])])
2089
 
        index.add_records(
2090
 
            [((b'tip',), b'no-eol,fulltext', (None, 0, 100), [])])
 
2043
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
 
2044
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2091
2045
        # position/length are ignored (because each pack could have fulltext or
2092
2046
        # delta, and be at a different position.
2093
 
        index.add_records(
2094
 
            [((b'tip',), b'fulltext,no-eol', (None, 50, 100), [])])
2095
 
        index.add_records(
2096
 
            [((b'tip',), b'fulltext,no-eol', (None, 0, 1000), [])])
 
2047
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
 
2048
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2097
2049
        # but neither should have added data.
2098
2050
        self.assertEqual([[], [], [], []], self.caught_entries)
2099
2051
 
2100
2052
    def test_add_versions_different_dup(self):
2101
2053
        index = self.two_graph_index(catch_adds=True)
2102
2054
        # change options
2103
 
        self.assertRaises(KnitCorrupt, index.add_records,
2104
 
                          [((b'tip',), b'no-eol,line-delta', (None, 0, 100), [])])
2105
 
        self.assertRaises(KnitCorrupt, index.add_records,
2106
 
                          [((b'tip',), b'line-delta,no-eol', (None, 0, 100), [])])
2107
 
        self.assertRaises(KnitCorrupt, index.add_records,
2108
 
                          [((b'tip',), b'fulltext', (None, 0, 100), [])])
 
2055
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2056
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
 
2057
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2058
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
 
2059
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2060
            [(('tip',), 'fulltext', (None, 0, 100), [])])
2109
2061
        # parents
2110
 
        self.assertRaises(KnitCorrupt, index.add_records,
2111
 
                          [((b'tip',), b'fulltext,no-eol', (None, 0, 100), [(b'parent',)])])
 
2062
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2063
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2112
2064
        # change options in the second record
2113
 
        self.assertRaises(KnitCorrupt, index.add_records,
2114
 
                          [((b'tip',), b'fulltext,no-eol', (None, 0, 100), []),
2115
 
                           ((b'tip',), b'no-eol,line-delta', (None, 0, 100), [])])
 
2065
        self.assertRaises(errors.KnitCorrupt, index.add_records,
 
2066
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
 
2067
             (('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2116
2068
        self.assertEqual([], self.caught_entries)
2117
2069
 
2118
2070
 
2124
2076
        if _min_buffer_size is None:
2125
2077
            _min_buffer_size = knit._STREAM_MIN_BUFFER_SIZE
2126
2078
        self.assertEqual(exp_groups, kvf._group_keys_for_io(keys,
2127
 
                                                            non_local_keys, positions,
2128
 
                                                            _min_buffer_size=_min_buffer_size))
 
2079
                                        non_local_keys, positions,
 
2080
                                        _min_buffer_size=_min_buffer_size))
2129
2081
 
2130
2082
    def assertSplitByPrefix(self, expected_map, expected_prefix_order,
2131
2083
                            keys):
2136
2088
    def test__group_keys_for_io(self):
2137
2089
        ft_detail = ('fulltext', False)
2138
2090
        ld_detail = ('line-delta', False)
2139
 
        f_a = (b'f', b'a')
2140
 
        f_b = (b'f', b'b')
2141
 
        f_c = (b'f', b'c')
2142
 
        g_a = (b'g', b'a')
2143
 
        g_b = (b'g', b'b')
2144
 
        g_c = (b'g', b'c')
 
2091
        f_a = ('f', 'a')
 
2092
        f_b = ('f', 'b')
 
2093
        f_c = ('f', 'c')
 
2094
        g_a = ('g', 'a')
 
2095
        g_b = ('g', 'b')
 
2096
        g_c = ('g', 'c')
2145
2097
        positions = {
2146
2098
            f_a: (ft_detail, (f_a, 0, 100), None),
2147
2099
            f_b: (ld_detail, (f_b, 100, 21), f_a),
2152
2104
            }
2153
2105
        self.assertGroupKeysForIo([([f_a], set())],
2154
2106
                                  [f_a], [], positions)
2155
 
        self.assertGroupKeysForIo([([f_a], {f_a})],
 
2107
        self.assertGroupKeysForIo([([f_a], set([f_a]))],
2156
2108
                                  [f_a], [f_a], positions)
2157
2109
        self.assertGroupKeysForIo([([f_a, f_b], set([]))],
2158
2110
                                  [f_a, f_b], [], positions)
2159
 
        self.assertGroupKeysForIo([([f_a, f_b], {f_b})],
 
2111
        self.assertGroupKeysForIo([([f_a, f_b], set([f_b]))],
2160
2112
                                  [f_a, f_b], [f_b], positions)
2161
2113
        self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2162
2114
                                  [f_a, g_a, f_b, g_b], [], positions)
2174
2126
                                  _min_buffer_size=125)
2175
2127
 
2176
2128
    def test__split_by_prefix(self):
2177
 
        self.assertSplitByPrefix({b'f': [(b'f', b'a'), (b'f', b'b')],
2178
 
                                  b'g': [(b'g', b'b'), (b'g', b'a')],
2179
 
                                  }, [b'f', b'g'],
2180
 
                                 [(b'f', b'a'), (b'g', b'b'),
2181
 
                                  (b'g', b'a'), (b'f', b'b')])
2182
 
 
2183
 
        self.assertSplitByPrefix({b'f': [(b'f', b'a'), (b'f', b'b')],
2184
 
                                  b'g': [(b'g', b'b'), (b'g', b'a')],
2185
 
                                  }, [b'f', b'g'],
2186
 
                                 [(b'f', b'a'), (b'f', b'b'),
2187
 
                                  (b'g', b'b'), (b'g', b'a')])
2188
 
 
2189
 
        self.assertSplitByPrefix({b'f': [(b'f', b'a'), (b'f', b'b')],
2190
 
                                  b'g': [(b'g', b'b'), (b'g', b'a')],
2191
 
                                  }, [b'f', b'g'],
2192
 
                                 [(b'f', b'a'), (b'f', b'b'),
2193
 
                                  (b'g', b'b'), (b'g', b'a')])
2194
 
 
2195
 
        self.assertSplitByPrefix({b'f': [(b'f', b'a'), (b'f', b'b')],
2196
 
                                  b'g': [(b'g', b'b'), (b'g', b'a')],
2197
 
                                  b'': [(b'a',), (b'b',)]
2198
 
                                  }, [b'f', b'g', b''],
2199
 
                                 [(b'f', b'a'), (b'g', b'b'),
2200
 
                                  (b'a',), (b'b',),
2201
 
                                  (b'g', b'a'), (b'f', b'b')])
 
2129
        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
 
2130
                                  'g': [('g', 'b'), ('g', 'a')],
 
2131
                                 }, ['f', 'g'],
 
2132
                                 [('f', 'a'), ('g', 'b'),
 
2133
                                  ('g', 'a'), ('f', 'b')])
 
2134
 
 
2135
        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
 
2136
                                  'g': [('g', 'b'), ('g', 'a')],
 
2137
                                 }, ['f', 'g'],
 
2138
                                 [('f', 'a'), ('f', 'b'),
 
2139
                                  ('g', 'b'), ('g', 'a')])
 
2140
 
 
2141
        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
 
2142
                                  'g': [('g', 'b'), ('g', 'a')],
 
2143
                                 }, ['f', 'g'],
 
2144
                                 [('f', 'a'), ('f', 'b'),
 
2145
                                  ('g', 'b'), ('g', 'a')])
 
2146
 
 
2147
        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
 
2148
                                  'g': [('g', 'b'), ('g', 'a')],
 
2149
                                  '': [('a',), ('b',)]
 
2150
                                 }, ['f', 'g', ''],
 
2151
                                 [('f', 'a'), ('g', 'b'),
 
2152
                                  ('a',), ('b',),
 
2153
                                  ('g', 'a'), ('f', 'b')])
2202
2154
 
2203
2155
 
2204
2156
class TestStacking(KnitTests):
2220
2172
    def test_add_lines(self):
2221
2173
        # lines added to the test are not added to the basis
2222
2174
        basis, test = self.get_basis_and_test_knit()
2223
 
        key = (b'foo',)
2224
 
        key_basis = (b'bar',)
2225
 
        key_cross_border = (b'quux',)
2226
 
        key_delta = (b'zaphod',)
2227
 
        test.add_lines(key, (), [b'foo\n'])
 
2175
        key = ('foo',)
 
2176
        key_basis = ('bar',)
 
2177
        key_cross_border = ('quux',)
 
2178
        key_delta = ('zaphod',)
 
2179
        test.add_lines(key, (), ['foo\n'])
2228
2180
        self.assertEqual({}, basis.get_parent_map([key]))
2229
2181
        # lines added to the test that reference across the stack do a
2230
2182
        # fulltext.
2231
 
        basis.add_lines(key_basis, (), [b'foo\n'])
 
2183
        basis.add_lines(key_basis, (), ['foo\n'])
2232
2184
        basis.calls = []
2233
 
        test.add_lines(key_cross_border, (key_basis,), [b'foo\n'])
 
2185
        test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
2234
2186
        self.assertEqual('fulltext', test._index.get_method(key_cross_border))
2235
2187
        # we don't even need to look at the basis to see that this should be
2236
2188
        # stored as a fulltext
2237
2189
        self.assertEqual([], basis.calls)
2238
2190
        # Subsequent adds do delta.
2239
2191
        basis.calls = []
2240
 
        test.add_lines(key_delta, (key_cross_border,), [b'foo\n'])
 
2192
        test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
2241
2193
        self.assertEqual('line-delta', test._index.get_method(key_delta))
2242
2194
        self.assertEqual([], basis.calls)
2243
2195
 
2244
2196
    def test_annotate(self):
2245
2197
        # annotations from the test knit are answered without asking the basis
2246
2198
        basis, test = self.get_basis_and_test_knit()
2247
 
        key = (b'foo',)
2248
 
        key_basis = (b'bar',)
2249
 
        test.add_lines(key, (), [b'foo\n'])
 
2199
        key = ('foo',)
 
2200
        key_basis = ('bar',)
 
2201
        key_missing = ('missing',)
 
2202
        test.add_lines(key, (), ['foo\n'])
2250
2203
        details = test.annotate(key)
2251
 
        self.assertEqual([(key, b'foo\n')], details)
 
2204
        self.assertEqual([(key, 'foo\n')], details)
2252
2205
        self.assertEqual([], basis.calls)
2253
2206
        # But texts that are not in the test knit are looked for in the basis
2254
2207
        # directly.
2255
 
        basis.add_lines(key_basis, (), [b'foo\n', b'bar\n'])
 
2208
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2256
2209
        basis.calls = []
2257
2210
        details = test.annotate(key_basis)
2258
 
        self.assertEqual(
2259
 
            [(key_basis, b'foo\n'), (key_basis, b'bar\n')], details)
 
2211
        self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
2260
2212
        # Not optimised to date:
2261
2213
        # self.assertEqual([("annotate", key_basis)], basis.calls)
2262
 
        self.assertEqual([('get_parent_map', {key_basis}),
2263
 
                          ('get_parent_map', {key_basis}),
2264
 
                          ('get_record_stream', [key_basis], 'topological', True)],
2265
 
                         basis.calls)
 
2214
        self.assertEqual([('get_parent_map', set([key_basis])),
 
2215
            ('get_parent_map', set([key_basis])),
 
2216
            ('get_record_stream', [key_basis], 'topological', True)],
 
2217
            basis.calls)
2266
2218
 
2267
2219
    def test_check(self):
2268
2220
        # At the moment checking a stacked knit does implicitly check the
2273
2225
    def test_get_parent_map(self):
2274
2226
        # parents in the test knit are answered without asking the basis
2275
2227
        basis, test = self.get_basis_and_test_knit()
2276
 
        key = (b'foo',)
2277
 
        key_basis = (b'bar',)
2278
 
        key_missing = (b'missing',)
 
2228
        key = ('foo',)
 
2229
        key_basis = ('bar',)
 
2230
        key_missing = ('missing',)
2279
2231
        test.add_lines(key, (), [])
2280
2232
        parent_map = test.get_parent_map([key])
2281
2233
        self.assertEqual({key: ()}, parent_map)
2285
2237
        basis.calls = []
2286
2238
        parent_map = test.get_parent_map([key, key_basis, key_missing])
2287
2239
        self.assertEqual({key: (),
2288
 
                          key_basis: ()}, parent_map)
2289
 
        self.assertEqual([("get_parent_map", {key_basis, key_missing})],
2290
 
                         basis.calls)
 
2240
            key_basis: ()}, parent_map)
 
2241
        self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],
 
2242
            basis.calls)
2291
2243
 
2292
2244
    def test_get_record_stream_unordered_fulltexts(self):
2293
2245
        # records from the test knit are answered without asking the basis:
2294
2246
        basis, test = self.get_basis_and_test_knit()
2295
 
        key = (b'foo',)
2296
 
        key_basis = (b'bar',)
2297
 
        key_missing = (b'missing',)
2298
 
        test.add_lines(key, (), [b'foo\n'])
 
2247
        key = ('foo',)
 
2248
        key_basis = ('bar',)
 
2249
        key_missing = ('missing',)
 
2250
        test.add_lines(key, (), ['foo\n'])
2299
2251
        records = list(test.get_record_stream([key], 'unordered', True))
2300
2252
        self.assertEqual(1, len(records))
2301
2253
        self.assertEqual([], basis.calls)
2302
2254
        # Missing (from test knit) objects are retrieved from the basis:
2303
 
        basis.add_lines(key_basis, (), [b'foo\n', b'bar\n'])
 
2255
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2304
2256
        basis.calls = []
2305
2257
        records = list(test.get_record_stream([key_basis, key_missing],
2306
 
                                              'unordered', True))
 
2258
            'unordered', True))
2307
2259
        self.assertEqual(2, len(records))
2308
2260
        calls = list(basis.calls)
2309
2261
        for record in records:
2312
2264
                self.assertIsInstance(record, AbsentContentFactory)
2313
2265
            else:
2314
2266
                reference = list(basis.get_record_stream([key_basis],
2315
 
                                                         'unordered', True))[0]
 
2267
                    'unordered', True))[0]
2316
2268
                self.assertEqual(reference.key, record.key)
2317
2269
                self.assertEqual(reference.sha1, record.sha1)
2318
2270
                self.assertEqual(reference.storage_kind, record.storage_kind)
2319
2271
                self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2320
 
                                 record.get_bytes_as(record.storage_kind))
 
2272
                    record.get_bytes_as(record.storage_kind))
2321
2273
                self.assertEqual(reference.get_bytes_as('fulltext'),
2322
 
                                 record.get_bytes_as('fulltext'))
 
2274
                    record.get_bytes_as('fulltext'))
2323
2275
        # It's not strictly minimal, but it seems reasonable for now for it to
2324
2276
        # ask which fallbacks have which parents.
2325
2277
        self.assertEqual([
2326
 
            ("get_parent_map", {key_basis, key_missing}),
 
2278
            ("get_parent_map", set([key_basis, key_missing])),
2327
2279
            ("get_record_stream", [key_basis], 'unordered', True)],
2328
2280
            calls)
2329
2281
 
2330
2282
    def test_get_record_stream_ordered_fulltexts(self):
2331
2283
        # ordering is preserved down into the fallback store.
2332
2284
        basis, test = self.get_basis_and_test_knit()
2333
 
        key = (b'foo',)
2334
 
        key_basis = (b'bar',)
2335
 
        key_basis_2 = (b'quux',)
2336
 
        key_missing = (b'missing',)
2337
 
        test.add_lines(key, (key_basis,), [b'foo\n'])
 
2285
        key = ('foo',)
 
2286
        key_basis = ('bar',)
 
2287
        key_basis_2 = ('quux',)
 
2288
        key_missing = ('missing',)
 
2289
        test.add_lines(key, (key_basis,), ['foo\n'])
2338
2290
        # Missing (from test knit) objects are retrieved from the basis:
2339
 
        basis.add_lines(key_basis, (key_basis_2,), [b'foo\n', b'bar\n'])
2340
 
        basis.add_lines(key_basis_2, (), [b'quux\n'])
 
2291
        basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
 
2292
        basis.add_lines(key_basis_2, (), ['quux\n'])
2341
2293
        basis.calls = []
2342
2294
        # ask for in non-topological order
2343
2295
        records = list(test.get_record_stream(
2346
2298
        results = []
2347
2299
        for record in records:
2348
2300
            self.assertSubset([record.key],
2349
 
                              (key_basis, key_missing, key_basis_2, key))
 
2301
                (key_basis, key_missing, key_basis_2, key))
2350
2302
            if record.key == key_missing:
2351
2303
                self.assertIsInstance(record, AbsentContentFactory)
2352
2304
            else:
2353
2305
                results.append((record.key, record.sha1, record.storage_kind,
2354
 
                                record.get_bytes_as('fulltext')))
 
2306
                    record.get_bytes_as('fulltext')))
2355
2307
        calls = list(basis.calls)
2356
2308
        order = [record[0] for record in results]
2357
2309
        self.assertEqual([key_basis_2, key_basis, key], order)
2360
2312
                source = test
2361
2313
            else:
2362
2314
                source = basis
2363
 
            record = next(source.get_record_stream([result[0]], 'unordered',
2364
 
                                                   True))
 
2315
            record = source.get_record_stream([result[0]], 'unordered',
 
2316
                True).next()
2365
2317
            self.assertEqual(record.key, result[0])
2366
2318
            self.assertEqual(record.sha1, result[1])
2367
2319
            # We used to check that the storage kind matched, but actually it
2371
2323
            self.assertEqual(record.get_bytes_as('fulltext'), result[3])
2372
2324
        # It's not strictly minimal, but it seems reasonable for now for it to
2373
2325
        # ask which fallbacks have which parents.
2374
 
        self.assertEqual(2, len(calls))
2375
 
        self.assertEqual(
2376
 
            ("get_parent_map", {key_basis, key_basis_2, key_missing}),
2377
 
            calls[0])
2378
 
        # topological is requested from the fallback, because that is what
2379
 
        # was requested at the top level.
2380
 
        self.assertIn(
2381
 
            calls[1], [
2382
 
                ("get_record_stream", [key_basis_2,
2383
 
                                       key_basis], 'topological', True),
2384
 
                ("get_record_stream", [key_basis, key_basis_2], 'topological', True)])
 
2326
        self.assertEqual([
 
2327
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
 
2328
            # topological is requested from the fallback, because that is what
 
2329
            # was requested at the top level.
 
2330
            ("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
 
2331
            calls)
2385
2332
 
2386
2333
    def test_get_record_stream_unordered_deltas(self):
2387
2334
        # records from the test knit are answered without asking the basis:
2388
2335
        basis, test = self.get_basis_and_test_knit()
2389
 
        key = (b'foo',)
2390
 
        key_basis = (b'bar',)
2391
 
        key_missing = (b'missing',)
2392
 
        test.add_lines(key, (), [b'foo\n'])
 
2336
        key = ('foo',)
 
2337
        key_basis = ('bar',)
 
2338
        key_missing = ('missing',)
 
2339
        test.add_lines(key, (), ['foo\n'])
2393
2340
        records = list(test.get_record_stream([key], 'unordered', False))
2394
2341
        self.assertEqual(1, len(records))
2395
2342
        self.assertEqual([], basis.calls)
2396
2343
        # Missing (from test knit) objects are retrieved from the basis:
2397
 
        basis.add_lines(key_basis, (), [b'foo\n', b'bar\n'])
 
2344
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2398
2345
        basis.calls = []
2399
2346
        records = list(test.get_record_stream([key_basis, key_missing],
2400
 
                                              'unordered', False))
 
2347
            'unordered', False))
2401
2348
        self.assertEqual(2, len(records))
2402
2349
        calls = list(basis.calls)
2403
2350
        for record in records:
2406
2353
                self.assertIsInstance(record, AbsentContentFactory)
2407
2354
            else:
2408
2355
                reference = list(basis.get_record_stream([key_basis],
2409
 
                                                         'unordered', False))[0]
 
2356
                    'unordered', False))[0]
2410
2357
                self.assertEqual(reference.key, record.key)
2411
2358
                self.assertEqual(reference.sha1, record.sha1)
2412
2359
                self.assertEqual(reference.storage_kind, record.storage_kind)
2413
2360
                self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2414
 
                                 record.get_bytes_as(record.storage_kind))
 
2361
                    record.get_bytes_as(record.storage_kind))
2415
2362
        # It's not strictly minimal, but it seems reasonable for now for it to
2416
2363
        # ask which fallbacks have which parents.
2417
2364
        self.assertEqual([
2418
 
            ("get_parent_map", {key_basis, key_missing}),
 
2365
            ("get_parent_map", set([key_basis, key_missing])),
2419
2366
            ("get_record_stream", [key_basis], 'unordered', False)],
2420
2367
            calls)
2421
2368
 
2422
2369
    def test_get_record_stream_ordered_deltas(self):
2423
2370
        # ordering is preserved down into the fallback store.
2424
2371
        basis, test = self.get_basis_and_test_knit()
2425
 
        key = (b'foo',)
2426
 
        key_basis = (b'bar',)
2427
 
        key_basis_2 = (b'quux',)
2428
 
        key_missing = (b'missing',)
2429
 
        test.add_lines(key, (key_basis,), [b'foo\n'])
 
2372
        key = ('foo',)
 
2373
        key_basis = ('bar',)
 
2374
        key_basis_2 = ('quux',)
 
2375
        key_missing = ('missing',)
 
2376
        test.add_lines(key, (key_basis,), ['foo\n'])
2430
2377
        # Missing (from test knit) objects are retrieved from the basis:
2431
 
        basis.add_lines(key_basis, (key_basis_2,), [b'foo\n', b'bar\n'])
2432
 
        basis.add_lines(key_basis_2, (), [b'quux\n'])
 
2378
        basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
 
2379
        basis.add_lines(key_basis_2, (), ['quux\n'])
2433
2380
        basis.calls = []
2434
2381
        # ask for in non-topological order
2435
2382
        records = list(test.get_record_stream(
2438
2385
        results = []
2439
2386
        for record in records:
2440
2387
            self.assertSubset([record.key],
2441
 
                              (key_basis, key_missing, key_basis_2, key))
 
2388
                (key_basis, key_missing, key_basis_2, key))
2442
2389
            if record.key == key_missing:
2443
2390
                self.assertIsInstance(record, AbsentContentFactory)
2444
2391
            else:
2445
2392
                results.append((record.key, record.sha1, record.storage_kind,
2446
 
                                record.get_bytes_as(record.storage_kind)))
 
2393
                    record.get_bytes_as(record.storage_kind)))
2447
2394
        calls = list(basis.calls)
2448
2395
        order = [record[0] for record in results]
2449
2396
        self.assertEqual([key_basis_2, key_basis, key], order)
2452
2399
                source = test
2453
2400
            else:
2454
2401
                source = basis
2455
 
            record = next(source.get_record_stream([result[0]], 'unordered',
2456
 
                                                   False))
 
2402
            record = source.get_record_stream([result[0]], 'unordered',
 
2403
                False).next()
2457
2404
            self.assertEqual(record.key, result[0])
2458
2405
            self.assertEqual(record.sha1, result[1])
2459
2406
            self.assertEqual(record.storage_kind, result[2])
2460
 
            self.assertEqual(record.get_bytes_as(
2461
 
                record.storage_kind), result[3])
 
2407
            self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
2462
2408
        # It's not strictly minimal, but it seems reasonable for now for it to
2463
2409
        # ask which fallbacks have which parents.
2464
2410
        self.assertEqual([
2465
 
            ("get_parent_map", {key_basis, key_basis_2, key_missing}),
 
2411
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2466
2412
            ("get_record_stream", [key_basis_2, key_basis], 'topological', False)],
2467
2413
            calls)
2468
2414
 
2469
2415
    def test_get_sha1s(self):
2470
2416
        # sha1's in the test knit are answered without asking the basis
2471
2417
        basis, test = self.get_basis_and_test_knit()
2472
 
        key = (b'foo',)
2473
 
        key_basis = (b'bar',)
2474
 
        key_missing = (b'missing',)
2475
 
        test.add_lines(key, (), [b'foo\n'])
2476
 
        key_sha1sum = osutils.sha_string(b'foo\n')
 
2418
        key = ('foo',)
 
2419
        key_basis = ('bar',)
 
2420
        key_missing = ('missing',)
 
2421
        test.add_lines(key, (), ['foo\n'])
 
2422
        key_sha1sum = osutils.sha('foo\n').hexdigest()
2477
2423
        sha1s = test.get_sha1s([key])
2478
2424
        self.assertEqual({key: key_sha1sum}, sha1s)
2479
2425
        self.assertEqual([], basis.calls)
2480
2426
        # But texts that are not in the test knit are looked for in the basis
2481
2427
        # directly (rather than via text reconstruction) so that remote servers
2482
2428
        # etc don't have to answer with full content.
2483
 
        basis.add_lines(key_basis, (), [b'foo\n', b'bar\n'])
2484
 
        basis_sha1sum = osutils.sha_string(b'foo\nbar\n')
 
2429
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
 
2430
        basis_sha1sum = osutils.sha('foo\nbar\n').hexdigest()
2485
2431
        basis.calls = []
2486
2432
        sha1s = test.get_sha1s([key, key_missing, key_basis])
2487
2433
        self.assertEqual({key: key_sha1sum,
2488
 
                          key_basis: basis_sha1sum}, sha1s)
2489
 
        self.assertEqual([("get_sha1s", {key_basis, key_missing})],
2490
 
                         basis.calls)
 
2434
            key_basis: basis_sha1sum}, sha1s)
 
2435
        self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],
 
2436
            basis.calls)
2491
2437
 
2492
2438
    def test_insert_record_stream(self):
2493
2439
        # records are inserted as normal; insert_record_stream builds on
2494
2440
        # add_lines, so a smoke test should be all that's needed:
2495
 
        key_basis = (b'bar',)
2496
 
        key_delta = (b'zaphod',)
 
2441
        key = ('foo',)
 
2442
        key_basis = ('bar',)
 
2443
        key_delta = ('zaphod',)
2497
2444
        basis, test = self.get_basis_and_test_knit()
2498
2445
        source = self.make_test_knit(name='source')
2499
 
        basis.add_lines(key_basis, (), [b'foo\n'])
 
2446
        basis.add_lines(key_basis, (), ['foo\n'])
2500
2447
        basis.calls = []
2501
 
        source.add_lines(key_basis, (), [b'foo\n'])
2502
 
        source.add_lines(key_delta, (key_basis,), [b'bar\n'])
 
2448
        source.add_lines(key_basis, (), ['foo\n'])
 
2449
        source.add_lines(key_delta, (key_basis,), ['bar\n'])
2503
2450
        stream = source.get_record_stream([key_delta], 'unordered', False)
2504
2451
        test.insert_record_stream(stream)
2505
2452
        # XXX: this does somewhat too many calls in making sure of whether it
2506
2453
        # has to recreate the full text.
2507
 
        self.assertEqual([("get_parent_map", {key_basis}),
2508
 
                          ('get_parent_map', {key_basis}),
2509
 
                          ('get_record_stream', [key_basis], 'unordered', True)],
2510
 
                         basis.calls)
2511
 
        self.assertEqual({key_delta: (key_basis,)},
2512
 
                         test.get_parent_map([key_delta]))
2513
 
        self.assertEqual(b'bar\n', next(test.get_record_stream([key_delta],
2514
 
                                                               'unordered', True)).get_bytes_as('fulltext'))
 
2454
        self.assertEqual([("get_parent_map", set([key_basis])),
 
2455
             ('get_parent_map', set([key_basis])),
 
2456
             ('get_record_stream', [key_basis], 'unordered', True)],
 
2457
            basis.calls)
 
2458
        self.assertEqual({key_delta:(key_basis,)},
 
2459
            test.get_parent_map([key_delta]))
 
2460
        self.assertEqual('bar\n', test.get_record_stream([key_delta],
 
2461
            'unordered', True).next().get_bytes_as('fulltext'))
2515
2462
 
2516
2463
    def test_iter_lines_added_or_present_in_keys(self):
2517
2464
        # Lines from the basis are returned, and lines for a given key are only
2518
2465
        # returned once.
2519
 
        key1 = (b'foo1',)
2520
 
        key2 = (b'foo2',)
 
2466
        key1 = ('foo1',)
 
2467
        key2 = ('foo2',)
2521
2468
        # all sources are asked for keys:
2522
2469
        basis, test = self.get_basis_and_test_knit()
2523
 
        basis.add_lines(key1, (), [b"foo"])
 
2470
        basis.add_lines(key1, (), ["foo"])
2524
2471
        basis.calls = []
2525
2472
        lines = list(test.iter_lines_added_or_present_in_keys([key1]))
2526
 
        self.assertEqual([(b"foo\n", key1)], lines)
2527
 
        self.assertEqual([("iter_lines_added_or_present_in_keys", {key1})],
2528
 
                         basis.calls)
 
2473
        self.assertEqual([("foo\n", key1)], lines)
 
2474
        self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
 
2475
            basis.calls)
2529
2476
        # keys in both are not duplicated:
2530
 
        test.add_lines(key2, (), [b"bar\n"])
2531
 
        basis.add_lines(key2, (), [b"bar\n"])
 
2477
        test.add_lines(key2, (), ["bar\n"])
 
2478
        basis.add_lines(key2, (), ["bar\n"])
2532
2479
        basis.calls = []
2533
2480
        lines = list(test.iter_lines_added_or_present_in_keys([key2]))
2534
 
        self.assertEqual([(b"bar\n", key2)], lines)
 
2481
        self.assertEqual([("bar\n", key2)], lines)
2535
2482
        self.assertEqual([], basis.calls)
2536
2483
 
2537
2484
    def test_keys(self):
2538
 
        key1 = (b'foo1',)
2539
 
        key2 = (b'foo2',)
 
2485
        key1 = ('foo1',)
 
2486
        key2 = ('foo2',)
2540
2487
        # all sources are asked for keys:
2541
2488
        basis, test = self.get_basis_and_test_knit()
2542
2489
        keys = test.keys()
2546
2493
        basis.add_lines(key1, (), [])
2547
2494
        basis.calls = []
2548
2495
        keys = test.keys()
2549
 
        self.assertEqual({key1}, set(keys))
 
2496
        self.assertEqual(set([key1]), set(keys))
2550
2497
        self.assertEqual([("keys",)], basis.calls)
2551
2498
        # keys in both are not duplicated:
2552
2499
        test.add_lines(key2, (), [])
2554
2501
        basis.calls = []
2555
2502
        keys = test.keys()
2556
2503
        self.assertEqual(2, len(keys))
2557
 
        self.assertEqual({key1, key2}, set(keys))
 
2504
        self.assertEqual(set([key1, key2]), set(keys))
2558
2505
        self.assertEqual([("keys",)], basis.calls)
2559
2506
 
2560
2507
    def test_add_mpdiffs(self):
2561
2508
        # records are inserted as normal; add_mpdiff builds on
2562
2509
        # add_lines, so a smoke test should be all that's needed:
2563
 
        key_basis = (b'bar',)
2564
 
        key_delta = (b'zaphod',)
 
2510
        key = ('foo',)
 
2511
        key_basis = ('bar',)
 
2512
        key_delta = ('zaphod',)
2565
2513
        basis, test = self.get_basis_and_test_knit()
2566
2514
        source = self.make_test_knit(name='source')
2567
 
        basis.add_lines(key_basis, (), [b'foo\n'])
 
2515
        basis.add_lines(key_basis, (), ['foo\n'])
2568
2516
        basis.calls = []
2569
 
        source.add_lines(key_basis, (), [b'foo\n'])
2570
 
        source.add_lines(key_delta, (key_basis,), [b'bar\n'])
 
2517
        source.add_lines(key_basis, (), ['foo\n'])
 
2518
        source.add_lines(key_delta, (key_basis,), ['bar\n'])
2571
2519
        diffs = source.make_mpdiffs([key_delta])
2572
2520
        test.add_mpdiffs([(key_delta, (key_basis,),
2573
 
                           source.get_sha1s([key_delta])[key_delta], diffs[0])])
2574
 
        self.assertEqual([("get_parent_map", {key_basis}),
2575
 
                          ('get_record_stream', [key_basis], 'unordered', True), ],
2576
 
                         basis.calls)
2577
 
        self.assertEqual({key_delta: (key_basis,)},
2578
 
                         test.get_parent_map([key_delta]))
2579
 
        self.assertEqual(b'bar\n', next(test.get_record_stream([key_delta],
2580
 
                                                               'unordered', True)).get_bytes_as('fulltext'))
 
2521
            source.get_sha1s([key_delta])[key_delta], diffs[0])])
 
2522
        self.assertEqual([("get_parent_map", set([key_basis])),
 
2523
            ('get_record_stream', [key_basis], 'unordered', True),],
 
2524
            basis.calls)
 
2525
        self.assertEqual({key_delta:(key_basis,)},
 
2526
            test.get_parent_map([key_delta]))
 
2527
        self.assertEqual('bar\n', test.get_record_stream([key_delta],
 
2528
            'unordered', True).next().get_bytes_as('fulltext'))
2581
2529
 
2582
2530
    def test_make_mpdiffs(self):
2583
2531
        # Generating an mpdiff across a stacking boundary should detect parent
2584
2532
        # texts regions.
2585
 
        key = (b'foo',)
2586
 
        key_left = (b'bar',)
2587
 
        key_right = (b'zaphod',)
 
2533
        key = ('foo',)
 
2534
        key_left = ('bar',)
 
2535
        key_right = ('zaphod',)
2588
2536
        basis, test = self.get_basis_and_test_knit()
2589
 
        basis.add_lines(key_left, (), [b'bar\n'])
2590
 
        basis.add_lines(key_right, (), [b'zaphod\n'])
 
2537
        basis.add_lines(key_left, (), ['bar\n'])
 
2538
        basis.add_lines(key_right, (), ['zaphod\n'])
2591
2539
        basis.calls = []
2592
2540
        test.add_lines(key, (key_left, key_right),
2593
 
                       [b'bar\n', b'foo\n', b'zaphod\n'])
 
2541
            ['bar\n', 'foo\n', 'zaphod\n'])
2594
2542
        diffs = test.make_mpdiffs([key])
2595
2543
        self.assertEqual([
2596
2544
            multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
2597
 
                                     multiparent.NewText([b'foo\n']),
2598
 
                                     multiparent.ParentText(1, 0, 2, 1)])],
2599
 
                         diffs)
 
2545
                multiparent.NewText(['foo\n']),
 
2546
                multiparent.ParentText(1, 0, 2, 1)])],
 
2547
            diffs)
2600
2548
        self.assertEqual(3, len(basis.calls))
2601
2549
        self.assertEqual([
2602
 
            ("get_parent_map", {key_left, key_right}),
2603
 
            ("get_parent_map", {key_left, key_right}),
 
2550
            ("get_parent_map", set([key_left, key_right])),
 
2551
            ("get_parent_map", set([key_left, key_right])),
2604
2552
            ],
2605
2553
            basis.calls[:-1])
2606
2554
        last_call = basis.calls[-1]
2607
2555
        self.assertEqual('get_record_stream', last_call[0])
2608
 
        self.assertEqual({key_left, key_right}, set(last_call[1]))
 
2556
        self.assertEqual(set([key_left, key_right]), set(last_call[1]))
2609
2557
        self.assertEqual('topological', last_call[2])
2610
2558
        self.assertEqual(True, last_call[3])
2611
2559
 
2616
2564
    def test_include_delta_closure_generates_a_knit_delta_closure(self):
2617
2565
        vf = self.make_test_knit(name='test')
2618
2566
        # put in three texts, giving ft, delta, delta
2619
 
        vf.add_lines((b'base',), (), [b'base\n', b'content\n'])
2620
 
        vf.add_lines((b'd1',), ((b'base',),), [b'd1\n'])
2621
 
        vf.add_lines((b'd2',), ((b'd1',),), [b'd2\n'])
 
2567
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
 
2568
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
 
2569
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2622
2570
        # But heuristics could interfere, so check what happened:
2623
2571
        self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
2624
 
                         [record.storage_kind for record in
2625
 
                          vf.get_record_stream([(b'base',), (b'd1',), (b'd2',)],
2626
 
                                               'topological', False)])
 
2572
            [record.storage_kind for record in
 
2573
             vf.get_record_stream([('base',), ('d1',), ('d2',)],
 
2574
                'topological', False)])
2627
2575
        # generate a stream of just the deltas include_delta_closure=True,
2628
2576
        # serialise to the network, and check that we get a delta closure on the wire.
2629
 
        stream = vf.get_record_stream(
2630
 
            [(b'd1',), (b'd2',)], 'topological', True)
 
2577
        stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
2631
2578
        netb = [record.get_bytes_as(record.storage_kind) for record in stream]
2632
2579
        # The first bytes should be a memo from _ContentMapGenerator, and the
2633
2580
        # second bytes should be empty (because its a API proxy not something
2634
2581
        # for wire serialisation.
2635
 
        self.assertEqual(b'', netb[1])
 
2582
        self.assertEqual('', netb[1])
2636
2583
        bytes = netb[0]
2637
2584
        kind, line_end = network_bytes_to_kind_and_offset(bytes)
2638
2585
        self.assertEqual('knit-delta-closure', kind)
2644
2591
    def test_get_record_stream_gives_records(self):
2645
2592
        vf = self.make_test_knit(name='test')
2646
2593
        # put in three texts, giving ft, delta, delta
2647
 
        vf.add_lines((b'base',), (), [b'base\n', b'content\n'])
2648
 
        vf.add_lines((b'd1',), ((b'base',),), [b'd1\n'])
2649
 
        vf.add_lines((b'd2',), ((b'd1',),), [b'd2\n'])
2650
 
        keys = [(b'd1',), (b'd2',)]
 
2594
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
 
2595
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
 
2596
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
 
2597
        keys = [('d1',), ('d2',)]
2651
2598
        generator = _VFContentMapGenerator(vf, keys,
2652
 
                                           global_map=vf.get_parent_map(keys))
 
2599
            global_map=vf.get_parent_map(keys))
2653
2600
        for record in generator.get_record_stream():
2654
 
            if record.key == (b'd1',):
2655
 
                self.assertEqual(b'd1\n', record.get_bytes_as('fulltext'))
 
2601
            if record.key == ('d1',):
 
2602
                self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
2656
2603
            else:
2657
 
                self.assertEqual(b'd2\n', record.get_bytes_as('fulltext'))
 
2604
                self.assertEqual('d2\n', record.get_bytes_as('fulltext'))
2658
2605
 
2659
2606
    def test_get_record_stream_kinds_are_raw(self):
2660
2607
        vf = self.make_test_knit(name='test')
2661
2608
        # put in three texts, giving ft, delta, delta
2662
 
        vf.add_lines((b'base',), (), [b'base\n', b'content\n'])
2663
 
        vf.add_lines((b'd1',), ((b'base',),), [b'd1\n'])
2664
 
        vf.add_lines((b'd2',), ((b'd1',),), [b'd2\n'])
2665
 
        keys = [(b'base',), (b'd1',), (b'd2',)]
 
2609
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
 
2610
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
 
2611
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
 
2612
        keys = [('base',), ('d1',), ('d2',)]
2666
2613
        generator = _VFContentMapGenerator(vf, keys,
2667
 
                                           global_map=vf.get_parent_map(keys))
2668
 
        kinds = {(b'base',): 'knit-delta-closure',
2669
 
                 (b'd1',): 'knit-delta-closure-ref',
2670
 
                 (b'd2',): 'knit-delta-closure-ref',
2671
 
                 }
 
2614
            global_map=vf.get_parent_map(keys))
 
2615
        kinds = {('base',): 'knit-delta-closure',
 
2616
            ('d1',): 'knit-delta-closure-ref',
 
2617
            ('d2',): 'knit-delta-closure-ref',
 
2618
            }
2672
2619
        for record in generator.get_record_stream():
2673
2620
            self.assertEqual(kinds[record.key], record.storage_kind)