/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/test_groupcompress.py

  • Committer: Jelmer Vernooij
  • Date: 2019-03-04 00:16:27 UTC
  • mfrom: (7293 work)
  • mto: This revision was merged to the branch mainline in revision 7318.
  • Revision ID: jelmer@jelmer.uk-20190304001627-v6u7o6pf97tukhek
Merge trunk.

@@ -43,7 +43,7 @@
         ]
     if compiled_groupcompress_feature.available():
         scenarios.append(('C',
-            {'compressor': groupcompress.PyrexGroupCompressor}))
+                          {'compressor': groupcompress.PyrexGroupCompressor}))
     return scenarios
 
 
@@ -72,7 +72,7 @@
     """Tests for GroupCompressor"""
 
     scenarios = group_compress_implementation_scenarios()
-    compressor = None # Set by scenario
+    compressor = None  # Set by scenario
 
     def test_empty_delta(self):
         compressor = self.compressor()
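
Note: the `compressor` attribute touched in the hunk above is filled in by breezy's test scenario machinery, which clones each test once per (name, parameters) pair and applies the parameter dict as instance attributes. A minimal sketch of that pattern, using illustrative names rather than breezy's actual helpers:

    import copy

    def multiply_by_scenarios(test, scenarios):
        # One clone of `test` per (name, params) pair; the params dict
        # becomes instance attributes, so self.compressor resolves to
        # the Python or Pyrex compressor depending on the scenario.
        for name, params in scenarios:
            clone = copy.copy(test)
            for attr, value in params.items():
                setattr(clone, attr, value)
            yield name, clone
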
@@ -82,7 +82,7 @@
         # diff against NUKK
         compressor = self.compressor()
         sha1, start_point, end_point, _ = compressor.compress(('label',),
-            b'strange\ncommon\n', None)
+                                                              b'strange\ncommon\n', None)
         self.assertEqual(sha_string(b'strange\ncommon\n'), sha1)
         expected_lines = b'f\x0fstrange\ncommon\n'
         self.assertEqual(expected_lines, b''.join(compressor.chunks))
@@ -115,10 +115,10 @@
         # reading something that is in the compressor stream already.
         compressor = self.compressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
-            b'strange\ncommon long line\nthat needs a 16 byte match\n', None)
+                                              b'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.chunks)
         sha1_2, _, end_point, _ = compressor.compress(('newlabel',),
-            b'common long line\nthat needs a 16 byte match\ndifferent\n', None)
+                                                      b'common long line\nthat needs a 16 byte match\ndifferent\n', None)
         # get the first out
         self.assertEqual((b'strange\ncommon long line\n'
                           b'that needs a 16 byte match\n', sha1_1),
@@ -131,10 +131,10 @@
     def test_pop_last(self):
         compressor = self.compressor()
         _, _, _, _ = compressor.compress(('key1',),
-            b'some text\nfor the first entry\n', None)
+                                         b'some text\nfor the first entry\n', None)
         expected_lines = list(compressor.chunks)
         _, _, _, _ = compressor.compress(('key2',),
-            b'some text\nfor the second entry\n', None)
+                                         b'some text\nfor the second entry\n', None)
         compressor.pop_last()
         self.assertEqual(expected_lines, compressor.chunks)
 
@@ -166,10 +166,10 @@
     def test_two_nosha_delta(self):
         compressor = self.compressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
-            b'strange\ncommon long line\nthat needs a 16 byte match\n', None)
+                                              b'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.chunks)
         sha1_2, start_point, end_point, _ = compressor.compress(('newlabel',),
-            b'common long line\nthat needs a 16 byte match\ndifferent\n', None)
+                                                                b'common long line\nthat needs a 16 byte match\ndifferent\n', None)
         self.assertEqual(sha_string(b'common long line\n'
                                     b'that needs a 16 byte match\n'
                                     b'different\n'), sha1_2)
@@ -179,9 +179,9 @@
             # source and target length
             b'\x36',
             # copy the line common
-            b'\x91\x0a\x2c', #copy, offset 0x0a, len 0x2c
+            b'\x91\x0a\x2c',  # copy, offset 0x0a, len 0x2c
             # add the line different, and the trailing newline
-            b'\x0adifferent\n', # insert 10 bytes
+            b'\x0adifferent\n',  # insert 10 bytes
             ])
         self.assertEqualDiffEncoded(expected_lines, compressor.chunks)
         self.assertEqual(sum(map(len, expected_lines)), end_point)
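
The byte literals asserted in the hunk above are groupcompress delta instructions, as the inline comments spell out: after the length header (`b'\x36'`), an opcode with the high bit set encodes a copy whose low bits say which offset and length bytes follow, and an opcode below 0x80 inserts that many literal bytes. A rough decoder covering only the two opcode shapes these tests exercise (a sketch for illustration; the real parser is breezy's C/pure-Python groupcompress code):

    def decode_delta_ops(data):
        # Sketch: handles one-byte-offset/one-byte-length copies and
        # short literal inserts, the only shapes asserted above.
        pos, ops = 0, []
        while pos < len(data):
            opcode = data[pos]
            pos += 1
            if opcode & 0x80:
                # Copy: bit 0 -> one offset byte, bit 4 -> one length byte,
                # so b'\x91\x0a\x2c' means copy offset 0x0a, length 0x2c.
                offset = length = 0
                if opcode & 0x01:
                    offset = data[pos]
                    pos += 1
                if opcode & 0x10:
                    length = data[pos]
                    pos += 1
                ops.append(('copy', offset, length))
            else:
                # Insert: the opcode itself is the literal byte count,
                # so b'\x0adifferent\n' inserts the 10 bytes b'different\n'.
                ops.append(('insert', data[pos:pos + opcode]))
                pos += opcode
        return ops
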
@@ -191,14 +191,14 @@
         # both parents.
         compressor = self.compressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
-            b'strange\ncommon very very long line\nwith some extra text\n', None)
+                                              b'strange\ncommon very very long line\nwith some extra text\n', None)
         sha1_2, _, _, _ = compressor.compress(('newlabel',),
-            b'different\nmoredifferent\nand then some more\n', None)
+                                              b'different\nmoredifferent\nand then some more\n', None)
         expected_lines = list(compressor.chunks)
         sha1_3, start_point, end_point, _ = compressor.compress(('label3',),
-            b'new\ncommon very very long line\nwith some extra text\n'
-            b'different\nmoredifferent\nand then some more\n',
-            None)
+                                                                b'new\ncommon very very long line\nwith some extra text\n'
+                                                                b'different\nmoredifferent\nand then some more\n',
+                                                                None)
         self.assertEqual(
             sha_string(b'new\ncommon very very long line\nwith some extra text\n'
                        b'different\nmoredifferent\nand then some more\n'),
@@ -211,9 +211,9 @@
             # insert new
             b'\x03new',
             # Copy of first parent 'common' range
-            b'\x91\x09\x31' # copy, offset 0x09, 0x31 bytes
+            b'\x91\x09\x31'  # copy, offset 0x09, 0x31 bytes
             # Copy of second parent 'different' range
-            b'\x91\x3c\x2b' # copy, offset 0x3c, 0x2b bytes
+            b'\x91\x3c\x2b'  # copy, offset 0x3c, 0x2b bytes
             ])
         self.assertEqualDiffEncoded(expected_lines, compressor.chunks)
         self.assertEqual(sum(map(len, expected_lines)), end_point)
@@ -245,10 +245,10 @@
     def test_two_nosha_delta(self):
         compressor = self.compressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
-            b'strange\ncommon long line\nthat needs a 16 byte match\n', None)
+                                              b'strange\ncommon long line\nthat needs a 16 byte match\n', None)
         expected_lines = list(compressor.chunks)
         sha1_2, start_point, end_point, _ = compressor.compress(('newlabel',),
-            b'common long line\nthat needs a 16 byte match\ndifferent\n', None)
+                                                                b'common long line\nthat needs a 16 byte match\ndifferent\n', None)
         self.assertEqual(sha_string(b'common long line\n'
                                     b'that needs a 16 byte match\n'
                                     b'different\n'), sha1_2)
@@ -258,9 +258,9 @@
             # target length
             b'\x36',
             # copy the line common
-            b'\x91\x0a\x2c', #copy, offset 0x0a, len 0x2c
+            b'\x91\x0a\x2c',  # copy, offset 0x0a, len 0x2c
             # add the line different, and the trailing newline
-            b'\x0adifferent\n', # insert 10 bytes
+            b'\x0adifferent\n',  # insert 10 bytes
             ])
         self.assertEqualDiffEncoded(expected_lines, compressor.chunks)
         self.assertEqual(sum(map(len, expected_lines)), end_point)
@@ -270,14 +270,14 @@
         # both parents.
         compressor = self.compressor()
         sha1_1, _, _, _ = compressor.compress(('label',),
-            b'strange\ncommon very very long line\nwith some extra text\n', None)
+                                              b'strange\ncommon very very long line\nwith some extra text\n', None)
         sha1_2, _, _, _ = compressor.compress(('newlabel',),
-            b'different\nmoredifferent\nand then some more\n', None)
+                                              b'different\nmoredifferent\nand then some more\n', None)
         expected_lines = list(compressor.chunks)
         sha1_3, start_point, end_point, _ = compressor.compress(('label3',),
-            b'new\ncommon very very long line\nwith some extra text\n'
-            b'different\nmoredifferent\nand then some more\n',
-            None)
+                                                                b'new\ncommon very very long line\nwith some extra text\n'
+                                                                b'different\nmoredifferent\nand then some more\n',
+                                                                None)
         self.assertEqual(
             sha_string(b'new\ncommon very very long line\nwith some extra text\n'
                        b'different\nmoredifferent\nand then some more\n'),
@@ -290,9 +290,9 @@
             # insert new
             b'\x04new\n',
             # Copy of first parent 'common' range
-            b'\x91\x0a\x30' # copy, offset 0x0a, 0x30 bytes
+            b'\x91\x0a\x30'  # copy, offset 0x0a, 0x30 bytes
             # Copy of second parent 'different' range
-            b'\x91\x3c\x2b' # copy, offset 0x3c, 0x2b bytes
+            b'\x91\x3c\x2b'  # copy, offset 0x3c, 0x2b bytes
             ])
         self.assertEqualDiffEncoded(expected_lines, compressor.chunks)
         self.assertEqual(sum(map(len, expected_lines)), end_point)
@@ -327,7 +327,7 @@
         block._ensure_content()
         self.assertEqual(b'', block._content)
         self.assertEqual(b'', block._z_content)
-        block._ensure_content() # Ensure content is safe to call 2x
+        block._ensure_content()  # Ensure content is safe to call 2x
 
     def test_from_invalid(self):
         self.assertRaises(ValueError,
@@ -338,9 +338,9 @@
         content = (b'a tiny bit of content\n')
         z_content = zlib.compress(content)
         z_bytes = (
-            b'gcb1z\n' # group compress block v1 plain
-            b'%d\n' # Length of compressed content
-            b'%d\n' # Length of uncompressed content
+            b'gcb1z\n'  # group compress block v1 plain
+            b'%d\n'  # Length of compressed content
+            b'%d\n'  # Length of uncompressed content
             b'%s'   # Compressed content
             ) % (len(z_content), len(content), z_content)
         block = groupcompress.GroupCompressBlock.from_bytes(
@@ -365,10 +365,10 @@
         self.assertEqual(gcb._z_content_length, len(gcb._z_content))
         self.assertEqual(total_len, len(block_bytes))
         self.assertEqual(gcb._content_length, content_len)
-        expected_header =(b'gcb1z\n' # group compress block v1 zlib
-                          b'%d\n' # Length of compressed content
-                          b'%d\n' # Length of uncompressed content
-                         ) % (gcb._z_content_length, gcb._content_length)
+        expected_header = (b'gcb1z\n'  # group compress block v1 zlib
+                           b'%d\n'  # Length of compressed content
+                           b'%d\n'  # Length of uncompressed content
+                           ) % (gcb._z_content_length, gcb._content_length)
         # The first chunk should be the header chunk. It is small, fixed size,
         # and there is no compelling reason to split it up
         self.assertEqual(expected_header, block_chunks[0])
@@ -385,10 +385,10 @@
         data = gcb.to_bytes()
         self.assertEqual(gcb._z_content_length, len(gcb._z_content))
         self.assertEqual(gcb._content_length, len(content))
-        expected_header =(b'gcb1z\n' # group compress block v1 zlib
-                          b'%d\n' # Length of compressed content
-                          b'%d\n' # Length of uncompressed content
-                         ) % (gcb._z_content_length, gcb._content_length)
+        expected_header = (b'gcb1z\n'  # group compress block v1 zlib
+                           b'%d\n'  # Length of compressed content
+                           b'%d\n'  # Length of uncompressed content
+                           ) % (gcb._z_content_length, gcb._content_length)
         self.assertStartsWith(data, expected_header)
         remaining_bytes = data[len(expected_header):]
         raw_bytes = zlib.decompress(remaining_bytes)
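
The last few hunks pin down the serialized block layout: a `gcb1z\n` magic line, the compressed and uncompressed lengths as decimal ASCII lines, then the zlib stream itself. A short round-trip sketch of exactly that layout (illustrative only; the real reader is `GroupCompressBlock.from_bytes`):

    import zlib

    def parse_gcb1z(data):
        # Layout per the assertions above: magic, z-length, length, body.
        magic, z_len, c_len, body = data.split(b'\n', 3)
        if magic != b'gcb1z':
            raise ValueError('not a group compress block: %r' % magic)
        content = zlib.decompress(body[:int(z_len)])
        if len(content) != int(c_len):
            raise ValueError('uncompressed length mismatch')
        return content

    content = b'a tiny bit of content\n'
    payload = zlib.compress(content)
    data = b'gcb1z\n%d\n%d\n%s' % (len(payload), len(content), payload)
    assert parse_gcb1z(data) == content
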
@@ -398,7 +398,7 @@
         gcb = groupcompress.GroupCompressBlock()
         gcb.set_chunked_content([b'this is some content\n'
                                  b'this content will be compressed\n'],
-                                 len(content))
+                                len(content))
         old_data = data
         data = gcb.to_bytes()
         self.assertEqual(old_data, data)
@@ -485,8 +485,8 @@
                           (b'd', 21, len(key_to_text[(b'2',)]),
                            [(b'c', 2, len(dup_content)),
                             (b'i', len(b'2 extra special\n'), b'')
-                           ]),
-                         ], block._dump())
+                            ]),
+                          ], block._dump())
 
 
 class TestCaseWithGroupCompressVersionedFiles(
@@ -497,8 +497,8 @@
         t = self.get_transport(dir)
         t.ensure_base()
         vf = groupcompress.make_pack_factory(graph=create_graph,
-            delta=False, keylength=keylength,
-            inconsistency_fatal=inconsistency_fatal)(t)
+                                             delta=False, keylength=keylength,
+                                             inconsistency_fatal=inconsistency_fatal)(t)
         if do_cleanup:
             self.addCleanup(groupcompress.cleanup_pack_group, vf)
         return vf
@@ -517,10 +517,10 @@
 
     def make_g_index_missing_parent(self):
        graph_index = self.make_g_index('missing_parent', 1,
-            [((b'parent', ), b'2 78 2 10', ([],)),
-             ((b'tip', ), b'2 78 2 10',
-              ([(b'parent', ), (b'missing-parent', )],)),
-              ])
+                                        [((b'parent', ), b'2 78 2 10', ([],)),
+                                         ((b'tip', ), b'2 78 2 10',
+                                            ([(b'parent', ), (b'missing-parent', )],)),
+                                         ])
         return graph_index
 
     def test_get_record_stream_as_requested(self):
@@ -533,27 +533,27 @@
         vf.add_lines((b'd',), (), [b'lines\n'])
         vf.writer.end()
         keys = [record.key for record in vf.get_record_stream(
-                    [(b'a',), (b'b',), (b'c',), (b'd',)],
-                    'as-requested', False)]
+            [(b'a',), (b'b',), (b'c',), (b'd',)],
+            'as-requested', False)]
         self.assertEqual([(b'a',), (b'b',), (b'c',), (b'd',)], keys)
         keys = [record.key for record in vf.get_record_stream(
-                    [(b'b',), (b'a',), (b'd',), (b'c',)],
-                    'as-requested', False)]
+            [(b'b',), (b'a',), (b'd',), (b'c',)],
+            'as-requested', False)]
         self.assertEqual([(b'b',), (b'a',), (b'd',), (b'c',)], keys)
 
         # It should work even after being repacked into another VF
         vf2 = self.make_test_vf(False, dir='target')
         vf2.insert_record_stream(vf.get_record_stream(
-                    [(b'b',), (b'a',), (b'd',), (b'c',)], 'as-requested', False))
+            [(b'b',), (b'a',), (b'd',), (b'c',)], 'as-requested', False))
         vf2.writer.end()
 
         keys = [record.key for record in vf2.get_record_stream(
-                    [(b'a',), (b'b',), (b'c',), (b'd',)],
-                    'as-requested', False)]
+            [(b'a',), (b'b',), (b'c',), (b'd',)],
+            'as-requested', False)]
         self.assertEqual([(b'a',), (b'b',), (b'c',), (b'd',)], keys)
         keys = [record.key for record in vf2.get_record_stream(
-                    [(b'b',), (b'a',), (b'd',), (b'c',)],
-                    'as-requested', False)]
+            [(b'b',), (b'a',), (b'd',), (b'c',)],
+            'as-requested', False)]
         self.assertEqual([(b'b',), (b'a',), (b'd',), (b'c',)], keys)
 
     def test_get_record_stream_max_bytes_to_index_default(self):
@@ -620,6 +620,7 @@
         keys = [(r.encode(),) for r in 'abcdefgh']
         # ordering in 'groupcompress' order, should actually swap the groups in
         # the target vf, but the groups themselves should not be disturbed.
+
         def small_size_stream():
             for record in vf.get_record_stream(keys, 'groupcompress', False):
                 record._manager._full_enough_block_size = \
@@ -697,8 +698,8 @@
         unvalidated = self.make_g_index_missing_parent()
         combined = _mod_index.CombinedGraphIndex([unvalidated])
         index = groupcompress._GCGraphIndex(combined,
-            is_locked=lambda: True, parents=True,
-            track_external_parent_refs=True)
+                                            is_locked=lambda: True, parents=True,
+                                            track_external_parent_refs=True)
         index.scan_unvalidated_index(unvalidated)
         self.assertEqual(
             frozenset([(b'missing-parent',)]), index.get_missing_parents())
@@ -708,9 +709,9 @@
         mod_index = btree_index.BTreeBuilder(1, 1)
         combined = _mod_index.CombinedGraphIndex([g_index, mod_index])
         index = groupcompress._GCGraphIndex(combined,
-            is_locked=lambda: True, parents=True,
-            add_callback=mod_index.add_nodes,
-            track_external_parent_refs=True)
+                                            is_locked=lambda: True, parents=True,
+                                            add_callback=mod_index.add_nodes,
+                                            track_external_parent_refs=True)
         index.add_records([
             ((b'new-key',), b'2 10 2 10', [((b'parent-1',), (b'parent-2',))])])
         self.assertEqual(
@@ -731,13 +732,14 @@
         target = self.make_test_vf(True, dir='target',
                                    inconsistency_fatal=inconsistency_fatal)
         for x in range(2):
-            source = self.make_source_with_b(x==1, 'source%s' % x)
+            source = self.make_source_with_b(x == 1, 'source%s' % x)
             target.insert_record_stream(source.get_record_stream(
                 [(b'b',)], 'unordered', False))
 
     def test_inconsistent_redundant_inserts_warn(self):
         """Should not insert a record that is already present."""
         warnings = []
+
         def warning(template, args):
             warnings.append(template % args)
         _trace_warning = trace.warning
@@ -777,7 +779,7 @@
         t = self.get_transport('.')
         t.ensure_base()
         factory = groupcompress.make_pack_factory(graph=True,
-            delta=False, keylength=1, inconsistency_fatal=True)
+                                                  delta=False, keylength=1, inconsistency_fatal=True)
         vf = factory(t)
         self.addCleanup(groupcompress.cleanup_pack_group, vf)
         return vf
@@ -818,13 +820,14 @@
     def __init__(self, canned_get_blocks=None):
         self._group_cache = {}
         self._canned_get_blocks = canned_get_blocks or []
+
     def _get_blocks(self, read_memos):
         return iter(self._canned_get_blocks)
-    
+
 
 class Test_BatchingBlockFetcher(TestCaseWithGroupCompressVersionedFiles):
     """Simple whitebox unit tests for _BatchingBlockFetcher."""
-    
+
     def test_add_key_new_read_memo(self):
         """Adding a key with an uncached read_memo new to this batch adds that
         read_memo to the list of memos to fetch.
@@ -923,20 +926,21 @@
 
     _texts = {
         (b'key1',): b"this is a text\n"
-                   b"with a reasonable amount of compressible bytes\n"
-                   b"which can be shared between various other texts\n",
+        b"with a reasonable amount of compressible bytes\n"
+        b"which can be shared between various other texts\n",
         (b'key2',): b"another text\n"
-                   b"with a reasonable amount of compressible bytes\n"
-                   b"which can be shared between various other texts\n",
+        b"with a reasonable amount of compressible bytes\n"
+        b"which can be shared between various other texts\n",
         (b'key3',): b"yet another text which won't be extracted\n"
-                   b"with a reasonable amount of compressible bytes\n"
-                   b"which can be shared between various other texts\n",
+        b"with a reasonable amount of compressible bytes\n"
+        b"which can be shared between various other texts\n",
        (b'key4',): b"this will be extracted\n"
-                   b"but references most of its bytes from\n"
-                   b"yet another text which won't be extracted\n"
-                   b"with a reasonable amount of compressible bytes\n"
-                   b"which can be shared between various other texts\n",
+        b"but references most of its bytes from\n"
+        b"yet another text which won't be extracted\n"
+        b"with a reasonable amount of compressible bytes\n"
+        b"which can be shared between various other texts\n",
     }
+
     def make_block(self, key_to_text):
         """Create a GroupCompressBlock, filling it with the given texts."""
         compressor = groupcompress.GroupCompressor()
@@ -994,9 +998,9 @@
         self.assertTrue(block_length > len(stripped_block))
         empty_z_header = zlib.compress(b'')
         self.assertEqual(b'groupcompress-block\n'
-                         b'8\n' # len(compress(''))
-                         b'0\n' # len('')
-                         b'%d\n'# compressed block len
+                         b'8\n'  # len(compress(''))
+                         b'0\n'  # len('')
+                         b'%d\n'  # compressed block len
                          b'%s'  # zheader
                          b'%s'  # block
                          % (len(stripped_block), empty_z_header,
@@ -1026,15 +1030,15 @@
         entry4 = locations[(b'key4',)]
         self.assertEqualDiff(b'key1\n'
                              b'\n'  # no parents
-                             b'%d\n' # start offset
-                             b'%d\n' # end offset
+                             b'%d\n'  # start offset
+                             b'%d\n'  # end offset
                              b'key4\n'
                              b'\n'
                              b'%d\n'
                              b'%d\n'
                              % (entry1[0], entry1[1],
                                 entry4[0], entry4[1]),
-                            header)
+                             header)
         z_block = rest[z_header_len:]
         self.assertEqual(block_bytes, z_block)
 
@@ -1101,11 +1105,12 @@
     def test_manager_custom_compressor_settings(self):
         locations, old_block = self.make_block(self._texts)
         called = []
+
         def compressor_settings():
             called.append('called')
             return (10,)
         manager = groupcompress._LazyGroupContentManager(old_block,
-            get_compressor_settings=compressor_settings)
+                                                         get_compressor_settings=compressor_settings)
         gcvf = groupcompress.GroupCompressVersionedFiles
         # It doesn't greedily evaluate compressor_settings
         self.assertIs(None, manager._compressor_settings)
@@ -1119,10 +1124,10 @@
         if not isinstance(groupcompress.GroupCompressor,
                           groupcompress.PyrexGroupCompressor):
             raise tests.TestNotApplicable('pure-python compressor'
-                ' does not handle compressor_settings')
+                                          ' does not handle compressor_settings')
         locations, old_block = self.make_block(self._texts)
         manager = groupcompress._LazyGroupContentManager(old_block,
-            get_compressor_settings=lambda: dict(max_bytes_to_index=32))
+                                                         get_compressor_settings=lambda: dict(max_bytes_to_index=32))
         gc = manager._make_group_compressor()
         self.assertEqual(32, gc._delta_index._max_bytes_to_index)
         self.add_key_to_manager((b'key3',), locations, old_block, manager)
@@ -1191,16 +1196,16 @@
         # _GCBuildDetails inlines some of the data that used to be spread out
         # across a bunch of tuples
         bd = groupcompress._GCBuildDetails((('parent1',), ('parent2',)),
-            ('INDEX', 10, 20, 0, 5))
+                                           ('INDEX', 10, 20, 0, 5))
         self.assertEqual(4, len(bd))
         self.assertEqual(('INDEX', 10, 20, 0, 5), bd[0])
-        self.assertEqual(None, bd[1]) # Compression Parent is always None
+        self.assertEqual(None, bd[1])  # Compression Parent is always None
         self.assertEqual((('parent1',), ('parent2',)), bd[2])
-        self.assertEqual(('group', None), bd[3]) # Record details
+        self.assertEqual(('group', None), bd[3])  # Record details
 
     def test__repr__(self):
         bd = groupcompress._GCBuildDetails((('parent1',), ('parent2',)),
-            ('INDEX', 10, 20, 0, 5))
+                                           ('INDEX', 10, 20, 0, 5))
         self.assertEqual("_GCBuildDetails(('INDEX', 10, 20, 0, 5),"
                          " (('parent1',), ('parent2',)))",
                          repr(bd))
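
The two tests in the final hunk treat `_GCBuildDetails` as a read-only stand-in for a 4-tuple of (index_memo, compression_parent, parents, record_details). A minimal sketch of that interface as the assertions describe it (hypothetical, not breezy's actual class):

    class BuildDetailsSketch:
        # Stores only the parents and the index memo; the other two
        # tuple slots are constants for groupcompress records.
        __slots__ = ('_parents', '_index_memo')

        def __init__(self, parents, index_memo):
            self._parents = parents
            self._index_memo = index_memo

        def __len__(self):
            return 4

        def __getitem__(self, offset):
            return (self._index_memo,   # bd[0]: index memo
                    None,               # bd[1]: compression parent
                    self._parents,      # bd[2]: parent keys
                    ('group', None),    # bd[3]: record details
                    )[offset]

        def __repr__(self):
            return '%s(%s, %s)' % (self.__class__.__name__,
                                   self._index_memo, self._parents)
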