/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/per_versionedfile.py

  • Committer: Jelmer Vernooij
  • Date: 2019-05-29 03:22:34 UTC
  • mfrom: (7303 work)
  • mto: This revision was merged to the branch mainline in revision 7306.
  • Revision ID: jelmer@jelmer.uk-20190529032234-mt3fuws8gq03tapi
Merge trunk.
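
The changes below are formatting-only: continuation lines move from a fixed
hanging indent to PEP 8-style visual alignment under the opening bracket,
dict literals gain a space after the colon, and a few long calls are
rewrapped. A minimal before/after sketch, using a pair of lines that appear
verbatim in one of the hunks below:

    # before: continuation indented by a fixed four spaces
    self.assertRaises(RevisionNotPresent,
        f.annotate, b'foo')

    # after: continuation aligned with the opening parenthesis
    self.assertRaises(RevisionNotPresent,
                      f.annotate, b'foo')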

=== modified file 'breezy/tests/per_versionedfile.py'
--- breezy/tests/per_versionedfile.py
+++ breezy/tests/per_versionedfile.py
@@ -98,14 +98,14 @@
     f.add_lines(b'left', [b'base'], [b'base\n', b'left' + last_char])
     if not left_only:
         f.add_lines(b'right', [b'base'],
-            [b'base\n', b'right' + last_char])
+                    [b'base\n', b'right' + last_char])
         f.add_lines(b'merged', [b'left', b'right'],
-            [b'base\n', b'left\n', b'right\n', b'merged' + last_char])
+                    [b'base\n', b'left\n', b'right\n', b'merged' + last_char])
     return f, parents
 
 
 def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
-    nograph=False, nokeys=False):
+                      nograph=False, nokeys=False):
     """Get a diamond graph to exercise deltas and merges.
 
     This creates a 5-node graph in files. If files supports 2-length keys two
@@ -134,12 +134,14 @@
     else:
         last_char = b''
     result = []
+
     def get_parents(suffix_list):
         if nograph:
             return ()
         else:
             result = [prefix + suffix for suffix in suffix_list]
             return result
+
     def get_key(suffix):
         if nokeys:
             return (None, )
@@ -149,23 +151,24 @@
     # which is how commit operates.
     for prefix in prefixes:
         result.append(files.add_lines(prefix + get_key(b'origin'), (),
-            [b'origin' + last_char]))
+                                      [b'origin' + last_char]))
     for prefix in prefixes:
         result.append(files.add_lines(prefix + get_key(b'base'),
-            get_parents([(b'origin',)]), [b'base' + last_char]))
+                                      get_parents([(b'origin',)]), [b'base' + last_char]))
     for prefix in prefixes:
         result.append(files.add_lines(prefix + get_key(b'left'),
-            get_parents([(b'base',)]),
-            [b'base\n', b'left' + last_char]))
+                                      get_parents([(b'base',)]),
+                                      [b'base\n', b'left' + last_char]))
     if not left_only:
         for prefix in prefixes:
             result.append(files.add_lines(prefix + get_key(b'right'),
-                get_parents([(b'base',)]),
-                [b'base\n', b'right' + last_char]))
+                                          get_parents([(b'base',)]),
+                                          [b'base\n', b'right' + last_char]))
         for prefix in prefixes:
             result.append(files.add_lines(prefix + get_key(b'merged'),
-                get_parents([(b'left',), (b'right',)]),
-                [b'base\n', b'left\n', b'right\n', b'merged' + last_char]))
+                                          get_parents(
+                                              [(b'left',), (b'right',)]),
+                                          [b'base\n', b'left\n', b'right\n', b'merged' + last_char]))
     return result
 
 
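For orientation, the get_diamond_files helper above builds the five-node
"diamond" ancestry that most of the tests in this module exercise. A rough
sketch of the parent graph it creates for each prefix, inferred from the
add_lines calls in the hunk above (not itself part of the diff):

    # origin <- base <- {left, right} <- merged; with nograph=True the
    # parent tuples are all empty, and left_only=True stops after 'left'.
    DIAMOND_PARENTS = {
        b'origin': (),
        b'base': (b'origin',),
        b'left': (b'base',),
        b'right': (b'base',),
        b'merged': (b'left', b'right'),
    }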
@@ -186,6 +189,7 @@
         f = self.get_file()
         f.add_lines(b'r0', [], [b'a\n', b'b\n'])
         f.add_lines(b'r1', [b'r0'], [b'b\n', b'c\n'])
+
         def verify_file(f):
             versions = f.versions()
             self.assertTrue(b'r0' in versions)
@@ -197,9 +201,9 @@
             self.assertEqual(2, f.num_versions())
 
             self.assertRaises(RevisionNotPresent,
-                f.add_lines, b'r2', [b'foo'], [])
+                              f.add_lines, b'r2', [b'foo'], [])
             self.assertRaises(RevisionAlreadyPresent,
-                f.add_lines, b'r1', [], [])
+                              f.add_lines, b'r1', [], [])
         verify_file(f)
         # this checks that reopen with create=True does not break anything.
         f = self.reopen_file(create=True)
@@ -211,14 +215,16 @@
         _, _, parent_texts[b'r0'] = f.add_lines(b'r0', [], [b'a\n', b'b\n'])
         try:
             _, _, parent_texts[b'r1'] = f.add_lines_with_ghosts(b'r1',
-                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
+                                                                [b'r0', b'ghost'], [b'b\n', b'c\n'], parent_texts=parent_texts)
         except NotImplementedError:
             # if the format doesn't support ghosts, just add normally.
             _, _, parent_texts[b'r1'] = f.add_lines(b'r1',
-                [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
-        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'], parent_texts=parent_texts)
+                                                    [b'r0'], [b'b\n', b'c\n'], parent_texts=parent_texts)
+        f.add_lines(b'r2', [b'r1'], [b'c\n', b'd\n'],
+                    parent_texts=parent_texts)
         self.assertNotEqual(None, parent_texts[b'r0'])
         self.assertNotEqual(None, parent_texts[b'r1'])
+
         def verify_file(f):
             versions = f.versions()
             self.assertTrue(b'r0' in versions)
@@ -244,7 +250,7 @@
         # versioned files version sequences of bytes only.
         vf = self.get_file()
         self.assertRaises(errors.BzrBadParameterUnicode,
-            vf.add_lines, b'a', [], [b'a\n', u'b\n', b'c\n'])
+                          vf.add_lines, b'a', [], [b'a\n', u'b\n', b'c\n'])
         self.assertRaises(
             (errors.BzrBadParameterUnicode, NotImplementedError),
             vf.add_lines_with_ghosts, b'a', [], [b'a\n', u'b\n', b'c\n'])
@@ -270,7 +276,7 @@
         # \r characters are not permitted in lines being added
         vf = self.get_file()
         self.assertRaises(errors.BzrBadParameterContainsNewline,
-            vf.add_lines, b'a', [], [b'a\n\n'])
+                          vf.add_lines, b'a', [], [b'a\n\n'])
         self.assertRaises(
             (errors.BzrBadParameterContainsNewline, NotImplementedError),
             vf.add_lines_with_ghosts, b'a', [], [b'a\n\n'])
@@ -284,7 +290,7 @@
     def test_add_reserved(self):
         vf = self.get_file()
         self.assertRaises(errors.ReservedId,
-            vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])
+                          vf.add_lines, b'a:', [], [b'a\n', b'b\n', b'c\n'])
 
     def test_add_lines_nostoresha(self):
         """When nostore_sha is supplied using old content raises."""
@@ -298,13 +304,13 @@
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
-            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
             self.assertRaises(errors.ExistingContent,
-                vf.add_lines, version + b"2", [], lines,
-                nostore_sha=sha)
+                              vf.add_lines, version + b"2", [], lines,
+                              nostore_sha=sha)
             # and no new version should have been added.
             self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
-                version + b"2")
+                              version + b"2")
 
     def test_add_lines_with_ghosts_nostoresha(self):
         """When nostore_sha is supplied using old content raises."""
@@ -323,13 +329,13 @@
         except NotImplementedError:
             raise TestSkipped("add_lines_with_ghosts is optional")
         for sha, (version, lines) in zip(
-            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
             self.assertRaises(errors.ExistingContent,
-                vf.add_lines_with_ghosts, version + b"2", [], lines,
-                nostore_sha=sha)
+                              vf.add_lines_with_ghosts, version + b"2", [], lines,
+                              nostore_sha=sha)
             # and no new version should have been added.
             self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
-                version + b"2")
+                              version + b"2")
 
     def test_add_lines_return_value(self):
         # add_lines should return the sha1 and the text size.
@@ -345,11 +351,11 @@
             result = vf.add_lines(version, [], lines)
             self.assertEqual(3, len(result))
             self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
-                result[0:2])
+                             result[0:2])
         # parents should not affect the result:
         lines = sample_text_nl[1]
         self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
-            vf.add_lines(b'd', [b'b', b'c'], lines)[0:2])
+                         vf.add_lines(b'd', [b'b', b'c'], lines)[0:2])
 
     def test_get_reserved(self):
         vf = self.get_file()
@@ -403,7 +409,7 @@
             # (which is what this test tests) will generate a correct line
             # delta (which is to say, an empty delta).
             vf.add_lines(version, parents, lines,
-                left_matching_blocks=[(0, 0, 1)])
+                         left_matching_blocks=[(0, 0, 1)])
             parents = [version]
             versions.append(version)
             version_lines[version] = lines
@@ -423,14 +429,14 @@
         vf = self.get_file('fulltext')
         vf.add_lines(b'noeol', [], [b'line'])
         vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
-            left_matching_blocks=[(0, 1, 1)])
+                     left_matching_blocks=[(0, 1, 1)])
         self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
         # On top of a delta
         vf = self.get_file('delta')
         vf.add_lines(b'base', [], [b'line'])
         vf.add_lines(b'noeol', [b'base'], [b'prelude\n', b'line'])
         vf.add_lines(b'noeol2', [b'noeol'], [b'newline\n', b'line'],
-            left_matching_blocks=[(1, 1, 1)])
+                     left_matching_blocks=[(1, 1, 1)])
         self.assertEqualDiff(b'newline\nline', vf.get_text(b'noeol2'))
 
     def test_make_mpdiffs(self):
@@ -452,7 +458,8 @@
         except NotImplementedError:
             # old Weave formats do not allow ghosts
             return
-        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, [b'ghost'])
+        self.assertRaises(errors.RevisionNotPresent,
+                          vf.make_mpdiffs, [b'ghost'])
 
     def _setup_for_deltas(self, f):
         self.assertFalse(f.has_version('base'))
@@ -476,7 +483,8 @@
         # this is done by making it a merge of two parents with no common
         # anestry: noeolbase and noeol with the
         # later-inserted parent the leftmost.
-        f.add_lines(b'eolbeforefirstparent', [b'noeolbase', b'noeol'], [b'line'])
+        f.add_lines(b'eolbeforefirstparent', [
+                    b'noeolbase', b'noeol'], [b'line'])
         # two identical eol texts
         f.add_lines(b'noeoldup', [b'noeol'], [b'line'])
         next_parent = b'base'
@@ -550,17 +558,18 @@
         self.assertTrue(r2 < rM)
 
         self.assertRaises(RevisionNotPresent,
-            f.get_ancestry, [b'rM', b'rX'])
+                          f.get_ancestry, [b'rM', b'rX'])
 
         self.assertEqual(set(f.get_ancestry(b'rM')),
-            set(f.get_ancestry(b'rM', topo_sorted=False)))
+                         set(f.get_ancestry(b'rM', topo_sorted=False)))
 
     def test_mutate_after_finish(self):
         self._transaction = 'before'
         f = self.get_file()
         self._transaction = 'after'
         self.assertRaises(errors.OutSideTransaction, f.add_lines, b'', [], [])
-        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, b'', [], [])
+        self.assertRaises(errors.OutSideTransaction,
+                          f.add_lines_with_ghosts, b'', [], [])
 
     def test_copy_to(self):
         f = self.get_file()
@@ -579,23 +588,23 @@
         f = self.get_file()
         f.add_lines(b'r0', [], [b'a\n', b'b\n'])
         self.assertEqual(
-            {b'r0':()}, f.get_parent_map([b'r0']))
+            {b'r0': ()}, f.get_parent_map([b'r0']))
         f.add_lines(b'r1', [b'r0'], [b'a\n', b'b\n'])
         self.assertEqual(
-            {b'r1':(b'r0',)}, f.get_parent_map([b'r1']))
+            {b'r1': (b'r0',)}, f.get_parent_map([b'r1']))
         self.assertEqual(
-            {b'r0':(),
-             b'r1':(b'r0',)},
+            {b'r0': (),
+             b'r1': (b'r0',)},
             f.get_parent_map([b'r0', b'r1']))
         f.add_lines(b'r2', [], [b'a\n', b'b\n'])
         f.add_lines(b'r3', [], [b'a\n', b'b\n'])
         f.add_lines(b'm', [b'r0', b'r1', b'r2', b'r3'], [b'a\n', b'b\n'])
         self.assertEqual(
-            {b'm':(b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
+            {b'm': (b'r0', b'r1', b'r2', b'r3')}, f.get_parent_map([b'm']))
         self.assertEqual({}, f.get_parent_map(b'y'))
         self.assertEqual(
-            {b'r0':(),
-             b'r1':(b'r0',)},
+            {b'r0': (),
+             b'r1': (b'r0',)},
             f.get_parent_map([b'r0', b'y', b'r1']))
 
     def test_annotate(self):
@@ -607,7 +616,7 @@
         self.assertEqual(origins[1][0], b'r0')
 
         self.assertRaises(RevisionNotPresent,
-            f.annotate, b'foo')
+                          f.annotate, b'foo')
 
     def test_detection(self):
         # Test weaves detect corruption.
@@ -666,16 +675,17 @@
         vf.add_lines(b'otherchild',
                      [b'lancestor', b'base'],
                      [b'base\n', b'lancestor\n', b'otherchild\n'])
+
         def iter_with_versions(versions, expected):
             # now we need to see what lines are returned, and how often.
             lines = {}
             progress = InstrumentedProgress()
             # iterate over the lines
             for line in vf.iter_lines_added_or_present_in_versions(versions,
-                pb=progress):
+                                                                   pb=progress):
                 lines.setdefault(line, 0)
                 lines[line] += 1
-            if []!= progress.updates:
+            if [] != progress.updates:
                 self.assertEqual(expected, progress.updates)
             return lines
         lines = iter_with_versions([b'child', b'otherchild'],
@@ -715,8 +725,10 @@
             vf.add_lines_with_ghosts(b'notbxbfse', [parent_id_utf8], [])
         except NotImplementedError:
             # check the other ghost apis are also not implemented
-            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, [b'foo'])
-            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, b'foo')
+            self.assertRaises(NotImplementedError,
+                              vf.get_ancestry_with_ghosts, [b'foo'])
+            self.assertRaises(NotImplementedError,
+                              vf.get_parents_with_ghosts, b'foo')
             return
         vf = self.reopen_file()
         # test key graph related apis: getncestry, _graph, get_parents
@@ -725,19 +737,23 @@
         self.assertEqual([b'notbxbfse'], vf.get_ancestry(b'notbxbfse'))
         self.assertFalse(vf.has_version(parent_id_utf8))
         # we have _with_ghost apis to give us ghost information.
-        self.assertEqual([parent_id_utf8, b'notbxbfse'], vf.get_ancestry_with_ghosts([b'notbxbfse']))
-        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts(b'notbxbfse'))
+        self.assertEqual([parent_id_utf8, b'notbxbfse'],
+                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
+        self.assertEqual([parent_id_utf8],
+                         vf.get_parents_with_ghosts(b'notbxbfse'))
         # if we add something that is a ghost of another, it should correct the
         # results of the prior apis
         vf.add_lines(parent_id_utf8, [], [])
-        self.assertEqual([parent_id_utf8, b'notbxbfse'], vf.get_ancestry([b'notbxbfse']))
-        self.assertEqual({b'notbxbfse':(parent_id_utf8,)},
-            vf.get_parent_map([b'notbxbfse']))
+        self.assertEqual([parent_id_utf8, b'notbxbfse'],
+                         vf.get_ancestry([b'notbxbfse']))
+        self.assertEqual({b'notbxbfse': (parent_id_utf8,)},
+                         vf.get_parent_map([b'notbxbfse']))
         self.assertTrue(vf.has_version(parent_id_utf8))
         # we have _with_ghost apis to give us ghost information.
         self.assertEqual([parent_id_utf8, b'notbxbfse'],
-            vf.get_ancestry_with_ghosts([b'notbxbfse']))
-        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts(b'notbxbfse'))
+                         vf.get_ancestry_with_ghosts([b'notbxbfse']))
+        self.assertEqual([parent_id_utf8],
+                         vf.get_parents_with_ghosts(b'notbxbfse'))
 
     def test_add_lines_with_ghosts_after_normal_revs(self):
         # some versioned file formats allow lines to be added with parent
@@ -803,17 +819,11 @@
 
         # We are going to invasively corrupt the text
         # Make sure the internals of weave are the same
-        self.assertEqual([(b'{', 0)
-                         , b'hello\n'
-                        , (b'}', None)
-                        , (b'{', 1)
-                        , b'there\n'
-                        , (b'}', None)
-                        ], w._weave)
+        self.assertEqual([(b'{', 0), b'hello\n', (b'}', None), (b'{', 1), b'there\n', (b'}', None)
+                          ], w._weave)
 
-        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f'
-                        , b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
-                        ], w._sha1s)
+        self.assertEqual([b'f572d396fae9206628714fb2ce00f72e94f2258f', b'90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
+                          ], w._sha1s)
         w.check()
 
         # Corrupted
@@ -826,8 +836,8 @@
         w._weave[4] = b'there\n'
         self.assertEqual(b'hello\nthere\n', w.get_text(b'v2'))
 
-        #Invalid checksum, first digit changed
-        w._sha1s[1] =b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
+        # Invalid checksum, first digit changed
+        w._sha1s[1] = b'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
         return w
 
     def reopen_file(self, name='foo', create=False):
@@ -860,11 +870,11 @@
     def test_add_lines(self):
         self.plan_merge_vf.add_lines((b'root', b'a:'), [], [])
         self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
-            (b'root', b'a'), [], [])
-        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
-            (b'root', b'a:'), None, [])
-        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
-            (b'root', b'a:'), [], None)
+                          (b'root', b'a'), [], [])
+        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
+                          (b'root', b'a:'), None, [])
+        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
+                          (b'root', b'a:'), [], None)
 
     def setup_abcde(self):
         self.vf1.add_lines((b'root', b'A'), [], [b'a'])
@@ -872,28 +882,29 @@
         self.vf2.add_lines((b'root', b'C'), [], [b'c'])
         self.vf2.add_lines((b'root', b'D'), [(b'root', b'C')], [b'd'])
         self.plan_merge_vf.add_lines((b'root', b'E:'),
-            [(b'root', b'B'), (b'root', b'D')], [b'e'])
+                                     [(b'root', b'B'), (b'root', b'D')], [b'e'])
 
     def test_get_parents(self):
         self.setup_abcde()
         self.assertEqual({(b'root', b'B'): ((b'root', b'A'),)},
-            self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
+                         self.plan_merge_vf.get_parent_map([(b'root', b'B')]))
         self.assertEqual({(b'root', b'D'): ((b'root', b'C'),)},
-            self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
+                         self.plan_merge_vf.get_parent_map([(b'root', b'D')]))
         self.assertEqual({(b'root', b'E:'): ((b'root', b'B'), (b'root', b'D'))},
-            self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
+                         self.plan_merge_vf.get_parent_map([(b'root', b'E:')]))
         self.assertEqual({},
-            self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
+                         self.plan_merge_vf.get_parent_map([(b'root', b'F')]))
         self.assertEqual({
-                (b'root', b'B'): ((b'root', b'A'),),
-                (b'root', b'D'): ((b'root', b'C'),),
-                (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
-                },
+            (b'root', b'B'): ((b'root', b'A'),),
+            (b'root', b'D'): ((b'root', b'C'),),
+            (b'root', b'E:'): ((b'root', b'B'), (b'root', b'D')),
+            },
             self.plan_merge_vf.get_parent_map(
                 [(b'root', b'B'), (b'root', b'D'), (b'root', b'E:'), (b'root', b'F')]))
 
     def test_get_record_stream(self):
         self.setup_abcde()
+
         def get_record(suffix):
             return next(self.plan_merge_vf.get_record_stream(
                 [(b'root', suffix)], 'unordered', True))
@@ -913,7 +924,7 @@
         vf = self.get_file()
         # try an empty file access
         readonly_vf = self.get_factory()('foo',
-            transport.get_transport_from_url(self.get_readonly_url('.')))
+                                         transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
 
     def test_readonly_http_works_with_feeling(self):
@@ -923,7 +934,7 @@
         vf.add_lines(b'1', [], [b'a\n'])
         vf.add_lines(b'2', [b'1'], [b'b\n', b'a\n'])
         readonly_vf = self.get_factory()('foo',
-            transport.get_transport_from_url(self.get_readonly_url('.')))
+                                         transport.get_transport_from_url(self.get_readonly_url('.')))
         self.assertEqual([b'1', b'2'], vf.versions())
         self.assertEqual([b'1', b'2'], readonly_vf.versions())
         for version in readonly_vf.versions():
@@ -989,6 +1000,7 @@
                      [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'],
                      [b'aaa', b'xxx', b'bbb', b'yyy', b'ccc'])
     overlappedInsertExpected = [b'aaa', b'xxx', b'yyy', b'bbb']
+
     def testOverlappedInsert(self):
         self.doMerge([b'aaa', b'bbb'],
                      [b'aaa', b'xxx', b'yyy', b'bbb'],
@@ -997,7 +1009,6 @@
         # really it ought to reduce this to
         # [b'aaa', b'xxx', b'yyy', b'bbb']
 
-
     def testClashReplace(self):
         self.doMerge([b'aaa'],
                      [b'xxx'],
@@ -1242,8 +1253,9 @@
         f = self.get_knit()
         get_diamond_files(f, 1, trailing_eol=False)
         ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToUnannotated(None),
-            _mod_knit.DeltaAnnotatedToUnannotated(None))
+                                                _mod_knit.FTAnnotatedToUnannotated(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToUnannotated(None))
         self.assertEqual(
             b'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
             b'origin\n'
@@ -1260,8 +1272,9 @@
         f = self.get_knit()
         get_diamond_files(f, 1)
         ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToUnannotated(None),
-            _mod_knit.DeltaAnnotatedToUnannotated(None))
+                                                _mod_knit.FTAnnotatedToUnannotated(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToUnannotated(None))
         self.assertEqual(
             b'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
             b'origin\n'
@@ -1281,12 +1294,13 @@
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
         ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToFullText(None),
-            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
+                                                _mod_knit.FTAnnotatedToFullText(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToFullText(logged_vf))
         self.assertEqual(b'origin', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
-            True)], logged_vf.calls)
+                           True)], logged_vf.calls)
 
     def test_annotated_to_fulltext(self):
         """Test adapting annotated knits to full texts (for -> weaves)."""
@@ -1297,12 +1311,13 @@
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
         ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTAnnotatedToFullText(None),
-            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
+                                                _mod_knit.FTAnnotatedToFullText(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToFullText(logged_vf))
         self.assertEqual(b'origin\n', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
-            True)], logged_vf.calls)
+                           True)], logged_vf.calls)
 
     def test_unannotated_to_fulltext(self):
         """Test adapting unannotated knits to full texts.
@@ -1316,12 +1331,13 @@
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
         ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTPlainToFullText(None),
-            _mod_knit.DeltaPlainToFullText(logged_vf))
+                                                _mod_knit.FTPlainToFullText(
+                                                    None),
+                                                _mod_knit.DeltaPlainToFullText(logged_vf))
         self.assertEqual(b'origin\n', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
-            True)], logged_vf.calls)
+                           True)], logged_vf.calls)
 
     def test_unannotated_to_fulltext_no_eol(self):
         """Test adapting unannotated knits to full texts.
@@ -1335,12 +1351,13 @@
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
         ft_data, delta_data = self.helpGetBytes(f,
-            _mod_knit.FTPlainToFullText(None),
-            _mod_knit.DeltaPlainToFullText(logged_vf))
+                                                _mod_knit.FTPlainToFullText(
+                                                    None),
+                                                _mod_knit.DeltaPlainToFullText(logged_vf))
         self.assertEqual(b'origin', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
-            True)], logged_vf.calls)
+                           True)], logged_vf.calls)
 
 
 class TestKeyMapper(TestCaseWithMemoryTransport):
@@ -1362,7 +1379,8 @@
     def test_hash_prefix_mapper(self):
         #format6: hash + plain
         mapper = versionedfile.HashPrefixMapper()
-        self.assertEqual("9b/file-id", mapper.map((b"file-id", b"revision-id")))
+        self.assertEqual(
+            "9b/file-id", mapper.map((b"file-id", b"revision-id")))
         self.assertEqual("45/new-id", mapper.map((b"new-id", b"revision-id")))
         self.assertEqual((b'file-id',), mapper.unmap("9b/file-id"))
         self.assertEqual((b'new-id',), mapper.unmap("45/new-id"))
@@ -1372,9 +1390,9 @@
         mapper = versionedfile.HashEscapedPrefixMapper()
         self.assertEqual("88/%2520", mapper.map((b" ", b"revision-id")))
         self.assertEqual("ed/fil%2545-%2549d", mapper.map((b"filE-Id",
-            b"revision-id")))
+                                                           b"revision-id")))
         self.assertEqual("88/ne%2557-%2549d", mapper.map((b"neW-Id",
-            b"revision-id")))
+                                                          b"revision-id")))
         self.assertEqual((b'filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
         self.assertEqual((b'neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
 
@@ -1396,7 +1414,7 @@
         ('weave-named', {
             'cleanup': None,
             'factory': make_versioned_files_factory(WeaveFile,
-                ConstantMapper('inventory')),
+                                                    ConstantMapper('inventory')),
             'graph': True,
             'key_length': 1,
             'support_partial_insertion': False,
@@ -1441,7 +1459,7 @@
         ('weave-prefix', {
             'cleanup': None,
             'factory': make_versioned_files_factory(WeaveFile,
-                PrefixMapper()),
+                                                    PrefixMapper()),
             'graph': True,
             'key_length': 2,
             'support_partial_insertion': False,
@@ -1497,7 +1515,8 @@
         g.add_lines(key_a, [], [b'\n'])
         f.add_fallback_versioned_files(g)
         self.assertTrue(key_a in f.get_parent_map([key_a]))
-        self.assertFalse(key_a in f.without_fallbacks().get_parent_map([key_a]))
+        self.assertFalse(
+            key_a in f.without_fallbacks().get_parent_map([key_a]))
 
     def test_add_lines(self):
         f = self.get_versionedfiles()
@@ -1556,7 +1575,7 @@
                 ],
                 origins)
         self.assertRaises(RevisionNotPresent,
-            files.annotate, prefix + ('missing-key',))
+                          files.annotate, prefix + ('missing-key',))
 
     def test_check_no_parameters(self):
         files = self.get_versionedfiles()
@@ -1576,7 +1595,7 @@
         seen = set()
         # Texts output should be fulltexts.
         self.capture_stream(files, entries, seen.add,
-            files.get_parent_map(keys), require_fulltext=True)
+                            files.get_parent_map(keys), require_fulltext=True)
         # All texts should be output.
         self.assertEqual(set(keys), seen)
 
@@ -1589,10 +1608,10 @@
         files = self.get_versionedfiles()
 
     def get_diamond_files(self, files, trailing_eol=True, left_only=False,
-        nokeys=False):
+                          nokeys=False):
         return get_diamond_files(files, self.key_length,
-            trailing_eol=trailing_eol, nograph=not self.graph,
-            left_only=left_only, nokeys=nokeys)
+                                 trailing_eol=trailing_eol, nograph=not self.graph,
+                                 left_only=left_only, nokeys=nokeys)
 
     def _add_content_nostoresha(self, add_lines):
         """When nostore_sha is supplied using old content raises."""
@@ -1611,14 +1630,14 @@
             shas.append(sha)
         # we now have a copy of all the lines in the vf.
         for sha, (version, lines) in zip(
-            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
+                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
             new_key = self.get_simple_key(version + b"2")
             self.assertRaises(errors.ExistingContent,
-                vf.add_lines, new_key, [], lines,
-                nostore_sha=sha)
+                              vf.add_lines, new_key, [], lines,
+                              nostore_sha=sha)
             self.assertRaises(errors.ExistingContent,
-                vf.add_lines, new_key, [], lines,
-                nostore_sha=sha)
+                              vf.add_lines, new_key, [], lines,
+                              nostore_sha=sha)
             # and no new version should have been added.
             record = next(vf.get_record_stream([new_key], 'unordered', True))
             self.assertEqual('absent', record.storage_kind)
@@ -1717,26 +1736,26 @@
         key_a = self.get_simple_key(b'a')
         f.add_lines(key_a, [], [])
         self.assertEqual(b'',
-            next(f.get_record_stream([key_a], 'unordered', True
-                )).get_bytes_as('fulltext'))
+                         next(f.get_record_stream([key_a], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
         key_b = self.get_simple_key(b'b')
         f.add_lines(key_b, self.get_parents([key_a]), [])
         self.assertEqual(b'',
-            next(f.get_record_stream([key_b], 'unordered', True
-                )).get_bytes_as('fulltext'))
+                         next(f.get_record_stream([key_b], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
 
     def test_newline_only(self):
         f = self.get_versionedfiles()
         key_a = self.get_simple_key(b'a')
         f.add_lines(key_a, [], [b'\n'])
         self.assertEqual(b'\n',
-            next(f.get_record_stream([key_a], 'unordered', True
-                )).get_bytes_as('fulltext'))
+                         next(f.get_record_stream([key_a], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
         key_b = self.get_simple_key(b'b')
         f.add_lines(key_b, self.get_parents([key_a]), [b'\n'])
         self.assertEqual(b'\n',
-            next(f.get_record_stream([key_b], 'unordered', True
-                )).get_bytes_as('fulltext'))
+                         next(f.get_record_stream([key_b], 'unordered', True
+                                                  )).get_bytes_as('fulltext'))
 
     def test_get_known_graph_ancestry(self):
         f = self.get_versionedfiles()
@@ -1789,25 +1808,25 @@
     def assertValidStorageKind(self, storage_kind):
         """Assert that storage_kind is a valid storage_kind."""
         self.assertSubset([storage_kind],
-            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
-             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
-             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
-             'knit-delta-gz',
-             'knit-delta-closure', 'knit-delta-closure-ref',
-             'groupcompress-block', 'groupcompress-block-ref'])
+                          ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
+                           'knit-ft', 'knit-delta', 'chunked', 'fulltext',
+                           'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
+                           'knit-delta-gz',
+                           'knit-delta-closure', 'knit-delta-closure-ref',
+                           'groupcompress-block', 'groupcompress-block-ref'])
 
     def capture_stream(self, f, entries, on_seen, parents,
-        require_fulltext=False):
+                       require_fulltext=False):
         """Capture a stream for testing."""
         for factory in entries:
             on_seen(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
             if factory.sha1 is not None:
                 self.assertEqual(f.get_sha1s([factory.key])[factory.key],
-                    factory.sha1)
+                                 factory.sha1)
             self.assertEqual(parents[factory.key], factory.parents)
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
-                bytes)
+                                  bytes)
             if require_fulltext:
                 factory.get_bytes_as('fulltext')
 
@@ -1826,7 +1845,8 @@
         """Get diamond test keys list, and their sort ordering."""
         if self.key_length == 1:
             keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
-            sort_order = {(b'merged',):2, (b'left',):1, (b'right',):1, (b'base',):0}
+            sort_order = {(b'merged',): 2, (b'left',): 1,
+                          (b'right',): 1, (b'base',): 0}
         else:
             keys = [
                 (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
@@ -1846,7 +1866,8 @@
         """Get diamond test keys list, and their groupcompress sort ordering."""
         if self.key_length == 1:
             keys = [(b'merged',), (b'left',), (b'right',), (b'base',)]
-            sort_order = {(b'merged',): 0, (b'left',): 1, (b'right',): 1, (b'base',):2}
+            sort_order = {(b'merged',): 0, (b'left',): 1,
+                          (b'right',): 1, (b'base',): 2}
         else:
             keys = [
                 (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
@@ -1885,7 +1906,7 @@
             seen.append(factory.key)
             self.assertValidStorageKind(factory.storage_kind)
             self.assertSubset([factory.sha1],
-                [None, files.get_sha1s([factory.key])[factory.key]])
+                              [None, files.get_sha1s([factory.key])[factory.key]])
             self.assertEqual(parent_map[factory.key], factory.parents)
             # self.assertEqual(files.get_text(factory.key),
             ft_bytes = factory.get_bytes_as('fulltext')
@@ -1909,16 +1930,16 @@
     def assertStreamOrder(self, sort_order, seen, keys):
         self.assertEqual(len(set(seen)), len(keys))
         if self.key_length == 1:
-            lows = {():0}
+            lows = {(): 0}
         else:
-            lows = {(b'FileA',):0, (b'FileB',):0}
+            lows = {(b'FileA',): 0, (b'FileB',): 0}
         if not self.graph:
             self.assertEqual(set(keys), set(seen))
         else:
             for key in seen:
                 sort_pos = sort_order[key]
                 self.assertTrue(sort_pos >= lows[key[:-1]],
-                    "Out of order in sorted stream: %r, %r" % (key, seen))
+                                "Out of order in sorted stream: %r, %r" % (key, seen))
                 lows[key[:-1]] = sort_pos
 
     def test_get_record_stream_unknown_storage_kind_raises(self):
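The assertStreamOrder helper in the hunk above enforces the 'topological'
stream contract: within one file prefix, the sort position of yielded keys
must never decrease, so a child can never appear before its ancestors. A
standalone restatement of the single-prefix case (illustrative only; the
test itself keeps one low-water mark per prefix):

    def check_topological(seen, sort_order):
        # positions may repeat or rise, but must never drop
        low = 0
        for key in seen:
            pos = sort_order[key]
            assert pos >= low, "out of order: %r in %r" % (key, seen)
            low = pos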
@@ -1948,16 +1969,17 @@
             self.assertEqual(parent_map[factory.key], factory.parents)
             # currently no stream emits mpdiff
             self.assertRaises(errors.UnavailableRepresentation,
-                factory.get_bytes_as, 'mpdiff')
+                              factory.get_bytes_as, 'mpdiff')
             self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
-                bytes)
+                                  bytes)
         self.assertEqual(set(keys), seen)
 
     def test_get_record_stream_missing_records_are_absent(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
         if self.key_length == 1:
-            keys = [(b'merged',), (b'left',), (b'right',), (b'absent',), (b'base',)]
+            keys = [(b'merged',), (b'left',), (b'right',),
+                    (b'absent',), (b'base',)]
         else:
             keys = [
                 (b'FileA', b'merged'), (b'FileA', b'left'), (b'FileA', b'right'),
@@ -1996,7 +2018,7 @@
             for record in network_stream:
                 records.append(record)
                 self.assertEqual(source_record.storage_kind,
-                    record.storage_kind)
+                                 record.storage_kind)
                 self.assertEqual(source_record.parents, record.parents)
                 self.assertEqual(
                     source_record.get_bytes_as(source_record.storage_kind),
@@ -2019,7 +2041,7 @@
             yield record
 
     def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
-        stream):
+                                        stream):
         """Convert a stream to a bytes iterator.
 
         :param skipped_records: A list with one element to increment when a
@@ -2047,7 +2069,8 @@
             delta_parents = (key,)
         else:
             delta_parents = ()
-        files.add_lines(key_delta, delta_parents, [b'different\n', b'content\n'])
+        files.add_lines(key_delta, delta_parents, [
+                        b'different\n', b'content\n'])
         local = files.get_record_stream([key, key_delta], 'unordered', False)
         ref = files.get_record_stream([key, key_delta], 'unordered', False)
         skipped_records = [0]
@@ -2081,11 +2104,12 @@
             delta_parents = (key,)
         else:
             delta_parents = ()
-        files.add_lines(key_delta, delta_parents, [b'different\n', b'content\n'])
+        files.add_lines(key_delta, delta_parents, [
+                        b'different\n', b'content\n'])
         # Copy the basis text across so we can reconstruct the delta during
         # insertion into target.
         target_files.insert_record_stream(files.get_record_stream([key],
-            'unordered', False))
+                                                                  'unordered', False))
         local = files.get_record_stream([key_delta], 'unordered', False)
         ref = files.get_record_stream([key_delta], 'unordered', False)
         skipped_records = [0]
@@ -2117,7 +2141,8 @@
             delta_parents = (key,)
         else:
             delta_parents = ()
-        files.add_lines(key_delta, delta_parents, [b'different\n', b'content\n'])
+        files.add_lines(key_delta, delta_parents, [
+                        b'different\n', b'content\n'])
         local = files.get_record_stream([key_delta], 'unordered', True)
         ref = files.get_record_stream([key_delta], 'unordered', True)
         skipped_records = [0]
@@ -2153,7 +2178,7 @@
                     self.assertEqual(sha1, factory.sha1)
                 self.assertEqual(parents[factory.key], factory.parents)
                 self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
-                    bytes)
+                                      bytes)
         self.assertEqual(set(keys), seen)
 
     def test_filter_absent_records(self):
@@ -2173,7 +2198,7 @@
         entries = files.get_record_stream(keys, 'unordered', False)
         seen = set()
         self.capture_stream(files, versionedfile.filter_absent(entries), seen.add,
-            parent_map)
+                            parent_map)
         self.assertEqual(set(present_keys), seen)
 
     def get_mapper(self):
@@ -2224,7 +2249,7 @@
                 (merged_key,),
                 ], origins)
         self.assertRaises(RevisionNotPresent,
-            files.get_annotator().annotate, self.get_simple_key(b'missing-key'))
+                          files.get_annotator().annotate, self.get_simple_key(b'missing-key'))
 
     def test_get_parent_map(self):
         files = self.get_versionedfiles()
@@ -2243,12 +2268,12 @@
                 ((b'FileA', b'r2'), self.get_parents(())),
                 ((b'FileA', b'r3'), self.get_parents(())),
                 ((b'FileA', b'm'), self.get_parents(((b'FileA', b'r0'),
-                    (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
+                                                     (b'FileA', b'r1'), (b'FileA', b'r2'), (b'FileA', b'r3')))),
                 ]
         for key, parents in parent_details:
             files.add_lines(key, parents, [])
             # immediately after adding it should be queryable.
-            self.assertEqual({key:parents}, files.get_parent_map([key]))
+            self.assertEqual({key: parents}, files.get_parent_map([key]))
         # We can ask for an empty set
         self.assertEqual({}, files.get_parent_map([]))
         # We can ask for many keys
@@ -2267,7 +2292,8 @@
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
         if self.key_length == 1:
-            keys = [(b'base',), (b'origin',), (b'left',), (b'merged',), (b'right',)]
+            keys = [(b'base',), (b'origin',), (b'left',),
+                    (b'merged',), (b'right',)]
         else:
             # ask for shas from different prefixes.
             keys = [
@@ -2293,7 +2319,8 @@
         self.assertEqual(set(actual.keys()), set(expected.keys()))
         actual_parents = actual.get_parent_map(actual.keys())
         if self.graph:
-            self.assertEqual(actual_parents, expected.get_parent_map(expected.keys()))
+            self.assertEqual(
+                actual_parents, expected.get_parent_map(expected.keys()))
         else:
             for key, parents in actual_parents.items():
                 self.assertEqual(None, parents)
@@ -2315,7 +2342,7 @@
             source_transport)
         self.get_diamond_files(source, trailing_eol=False)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2330,7 +2357,7 @@
             source_transport)
         self.get_diamond_files(source, trailing_eol=False)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2343,7 +2370,7 @@
         source = make_file_factory(True, mapper)(source_transport)
         self.get_diamond_files(source)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2356,7 +2383,7 @@
         source = make_file_factory(True, mapper)(source_transport)
         self.get_diamond_files(source, trailing_eol=False)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2369,7 +2396,7 @@
         source = make_file_factory(False, mapper)(source_transport)
         self.get_diamond_files(source)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2382,7 +2409,7 @@
         source = make_file_factory(False, mapper)(source_transport)
         self.get_diamond_files(source, trailing_eol=False)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2394,7 +2421,7 @@
         # insert some keys into f.
         self.get_diamond_files(files, left_only=True)
         stream = source.get_record_stream(source.keys(), 'topological',
-            False)
+                                          False)
         files.insert_record_stream(stream)
         self.assertIdenticalVersionedFile(source, files)
 
@@ -2403,9 +2430,9 @@
         files = self.get_versionedfiles()
         source = self.get_versionedfiles('source')
         stream = source.get_record_stream([(b'missing',) * self.key_length],
-            'topological', False)
+                                          'topological', False)
         self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
-            stream)
+                          stream)
 
     def test_insert_record_stream_out_of_order(self):
         """An out of order stream can either error or work."""
@@ -2419,12 +2446,14 @@
         else:
             origin_keys = [(b'FileA', b'origin'), (b'FileB', b'origin')]
             end_keys = [(b'FileA', b'merged',), (b'FileA', b'left',),
-                (b'FileB', b'merged',), (b'FileB', b'left',)]
+                        (b'FileB', b'merged',), (b'FileB', b'left',)]
             start_keys = [(b'FileA', b'right',), (b'FileA', b'base',),
-                (b'FileB', b'right',), (b'FileB', b'base',)]
-        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
+                          (b'FileB', b'right',), (b'FileB', b'base',)]
+        origin_entries = source.get_record_stream(
+            origin_keys, 'unordered', False)
         end_entries = source.get_record_stream(end_keys, 'topological', False)
-        start_entries = source.get_record_stream(start_keys, 'topological', False)
+        start_entries = source.get_record_stream(
+            start_keys, 'topological', False)
         entries = itertools.chain(origin_entries, end_entries, start_entries)
         try:
             files.insert_record_stream(entries)
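The docstring in the hunk above states the contract being tested: an
out-of-order stream may either be inserted successfully or be rejected, so
the test wraps insert_record_stream in try/except and accepts both outcomes.
A sketch of that pattern, assuming the rejection surfaces as
errors.RevisionNotPresent (the exception used for missing parents elsewhere
in these tests; the actual except clause is not shown in this hunk):

    try:
        files.insert_record_stream(entries)
    except errors.RevisionNotPresent:
        # acceptable: this format insists on parents arriving first
        pass
    else:
        # equally acceptable: the format buffered or reordered the records
        pass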
2446
2475
        content = [(b'same same %d\n' % n) for n in range(500)]
2447
2476
        letters = b'abcdefghijklmnopqrstuvwxyz'
2448
2477
        for i in range(len(letters)):
2449
 
            letter = letters[i:i+1]
 
2478
            letter = letters[i:i + 1]
2450
2479
            key = (b'key-' + letter,)
2451
2480
            if self.key_length == 2:
2452
2481
                key = (b'prefix',) + key
2482
2511
        source_transport.mkdir('.')
2483
2512
        source = make_file_factory(False, mapper)(source_transport)
2484
2513
        get_diamond_files(source, self.key_length, trailing_eol=True,
2485
 
            nograph=False, left_only=False)
 
2514
                          nograph=False, left_only=False)
2486
2515
        return source
2487
2516
 
2488
2517
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
2496
2525
        files = self.get_versionedfiles()
2497
2526
        if self.support_partial_insertion:
2498
2527
            self.assertEqual([],
2499
 
                list(files.get_missing_compression_parent_keys()))
 
2528
                             list(files.get_missing_compression_parent_keys()))
2500
2529
            files.insert_record_stream(entries)
2501
2530
            missing_bases = files.get_missing_compression_parent_keys()
2502
2531
            self.assertEqual({self.get_simple_key(b'left')},
2503
 
                set(missing_bases))
 
2532
                             set(missing_bases))
2504
2533
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
2505
2534
        else:
2506
2535
            self.assertRaises(
@@ -2519 +2548 @@
                 'versioned file scenario does not support partial insertion')
         source = self.get_knit_delta_source()
         entries = source.get_record_stream([self.get_simple_key(b'origin'),
-            self.get_simple_key(b'merged')], 'unordered', False)
+                                            self.get_simple_key(b'merged')], 'unordered', False)
         files = self.get_versionedfiles()
         files.insert_record_stream(entries)
         missing_bases = files.get_missing_compression_parent_keys()
         self.assertEqual({self.get_simple_key(b'left')},
-            set(missing_bases))
+                         set(missing_bases))
         # 'merged' is inserted (although a commit of a write group involving
         # this versionedfiles would fail).
         merged_key = self.get_simple_key(b'merged')
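The flow under test: only the b'origin' fulltext and the b'merged' delta are pulled from the knit source, so b'merged' arrives without its compression parent b'left'. Condensed, with key_origin/key_merged/key_left standing in for the get_simple_key calls above:

    entries = source.get_record_stream(
        [key_origin, key_merged], 'unordered', False)  # b'left' omitted
    files.insert_record_stream(entries)
    # The orphaned delta is buffered rather than corrupting the store,
    # and the missing basis stays reported until it is supplied:
    assert set(files.get_missing_compression_parent_keys()) == {key_left}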
@@ -2559 +2588 @@
         # add a base to get included
         files.add_lines(self.get_simple_key(b'base'), (), [b'base\n'])
         # add an ancestor to be included on one side
-        files.add_lines(self.get_simple_key(b'lancestor'), (), [b'lancestor\n'])
+        files.add_lines(self.get_simple_key(
+            b'lancestor'), (), [b'lancestor\n'])
         # add an ancestor to be included on the other side
         files.add_lines(self.get_simple_key(b'rancestor'),
-            self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
+                        self.get_parents([self.get_simple_key(b'base')]), [b'rancestor\n'])
         # add a child of rancestor with no eofile-nl
         files.add_lines(self.get_simple_key(b'child'),
-            self.get_parents([self.get_simple_key(b'rancestor')]),
-            [b'base\n', b'child\n'])
+                        self.get_parents([self.get_simple_key(b'rancestor')]),
+                        [b'base\n', b'child\n'])
         # add a child of lancestor and base to join the two roots
         files.add_lines(self.get_simple_key(b'otherchild'),
-            self.get_parents([self.get_simple_key(b'lancestor'),
-                self.get_simple_key(b'base')]),
-            [b'base\n', b'lancestor\n', b'otherchild\n'])
+                        self.get_parents([self.get_simple_key(b'lancestor'),
+                                          self.get_simple_key(b'base')]),
+                        [b'base\n', b'lancestor\n', b'otherchild\n'])
+
         def iter_with_keys(keys, expected):
             # now we need to see what lines are returned, and how often.
             lines = {}
             progress = InstrumentedProgress()
             # iterate over the lines
             for line in files.iter_lines_added_or_present_in_keys(keys,
-                pb=progress):
+                                                                  pb=progress):
                 lines.setdefault(line, 0)
                 lines[line] += 1
-            if []!= progress.updates:
+            if [] != progress.updates:
                 self.assertEqual(expected, progress.updates)
             return lines
         lines = iter_with_keys(
-            [self.get_simple_key(b'child'), self.get_simple_key(b'otherchild')],
+            [self.get_simple_key(b'child'),
+             self.get_simple_key(b'otherchild')],
             [('Walking content', 0, 2),
              ('Walking content', 1, 2),
              ('Walking content', 2, 2)])
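iter_lines_added_or_present_in_keys yields (line, key) pairs, producing every line of the requested keys at least once while feeding the supplied progress bar. The counting idiom in iter_with_keys can be restated with a Counter; a sketch, not part of the test suite:

    from collections import Counter

    def line_counts(files, keys):
        # Maps each yielded (line, key) pair to its occurrence count.
        return Counter(files.iter_lines_added_or_present_in_keys(keys))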
@@ -2597 +2629 @@
 
         # test all lines
         lines = iter_with_keys(files.keys(),
-            [('Walking content', 0, 5),
-             ('Walking content', 1, 5),
-             ('Walking content', 2, 5),
-             ('Walking content', 3, 5),
-             ('Walking content', 4, 5),
-             ('Walking content', 5, 5)])
+                               [('Walking content', 0, 5),
+                                ('Walking content', 1, 5),
+                                ('Walking content', 2, 5),
+                                ('Walking content', 3, 5),
+                                ('Walking content', 4, 5),
+                                ('Walking content', 5, 5)])
         # all lines must be seen at least once
         self.assertTrue(lines[(b'base\n', self.get_simple_key(b'base'))] > 0)
         self.assertTrue(
@@ -2621 +2653 @@
         # this is done by two chains of 25 insertions
         files.add_lines(self.get_simple_key(b'base'), [], [b'line\n'])
         files.add_lines(self.get_simple_key(b'noeol'),
-            self.get_parents([self.get_simple_key(b'base')]), [b'line'])
+                        self.get_parents([self.get_simple_key(b'base')]), [b'line'])
         # detailed eol tests:
         # shared last line with parent no-eol
         files.add_lines(self.get_simple_key(b'noeolsecond'),
-            self.get_parents([self.get_simple_key(b'noeol')]),
-                [b'line\n', b'line'])
+                        self.get_parents([self.get_simple_key(b'noeol')]),
+                        [b'line\n', b'line'])
         # differing last line with parent, both no-eol
         files.add_lines(self.get_simple_key(b'noeolnotshared'),
-            self.get_parents([self.get_simple_key(b'noeolsecond')]),
-                [b'line\n', b'phone'])
+                        self.get_parents(
+                            [self.get_simple_key(b'noeolsecond')]),
+                        [b'line\n', b'phone'])
         # add eol following a noneol parent, change content
         files.add_lines(self.get_simple_key(b'eol'),
-            self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
+                        self.get_parents([self.get_simple_key(b'noeol')]), [b'phone\n'])
         # add eol following a noneol parent, no change content
         files.add_lines(self.get_simple_key(b'eolline'),
-            self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
+                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line\n'])
         # noeol with no parents:
         files.add_lines(self.get_simple_key(b'noeolbase'), [], [b'line'])
         # noeol preceding its leftmost parent in the output:
         # this is done by making it a merge of two parents with no common
         # ancestry: noeolbase and noeol with the
         # later-inserted parent the leftmost.
         files.add_lines(self.get_simple_key(b'eolbeforefirstparent'),
-            self.get_parents([self.get_simple_key(b'noeolbase'),
-                self.get_simple_key(b'noeol')]),
-            [b'line'])
+                        self.get_parents([self.get_simple_key(b'noeolbase'),
+                                          self.get_simple_key(b'noeol')]),
+                        [b'line'])
         # two identical no-eol texts
         files.add_lines(self.get_simple_key(b'noeoldup'),
-            self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
+                        self.get_parents([self.get_simple_key(b'noeol')]), [b'line'])
         next_parent = self.get_simple_key(b'base')
         text_name = b'chain1-'
         text = [b'line\n']
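All of the eol cases rest on one representation detail: a text with no trailing newline is stored with a final line that lacks b'\n'. A short recap using hypothetical keys:

    files.add_lines(key_base, [], [b'line\n'])              # trailing eol
    files.add_lines(key_noeol, [key_base], [b'line'])       # eol dropped
    files.add_lines(key_eolline, [key_noeol], [b'line\n'])  # eol restored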
@@ -2701 +2734 @@
                 [(key, parents, files.get_sha1s([key])[key], mpdiff)])
             self.assertEqualDiff(
                 next(files.get_record_stream([key], 'unordered',
-                    True)).get_bytes_as('fulltext'),
+                                             True)).get_bytes_as('fulltext'),
                 next(target.get_record_stream([key], 'unordered',
-                    True)).get_bytes_as('fulltext')
+                                              True)).get_bytes_as('fulltext')
                 )
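The assertEqualDiff above uses the standard way to read one text back out of a VersionedFiles: take the single record from a one-key stream and ask for its bytes as a fulltext. As a helper-style sketch:

    def read_fulltext(vf, key):
        # include_delta_closure=True lets a delta record be expanded.
        record = next(vf.get_record_stream([key], 'unordered', True))
        return record.get_bytes_as('fulltext')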
 
     def test_keys(self):
@@ -2738 +2771 @@
 
     def test_add_lines(self):
         self.assertRaises(NotImplementedError,
-                self.texts.add_lines, b"foo", [], [])
+                          self.texts.add_lines, b"foo", [], [])
 
     def test_add_mpdiffs(self):
         self.assertRaises(NotImplementedError,
-                self.texts.add_mpdiffs, [])
+                          self.texts.add_mpdiffs, [])
 
     def test_check_noerrors(self):
         self.texts.check()
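These tests pin down the read-only surface of the virtual texts object: the mutators raise NotImplementedError while check() still passes. Outside the TestCase harness that looks like this, with `texts` standing in for self.texts:

    try:
        texts.add_lines(b"foo", [], [])
    except NotImplementedError:
        pass  # expected: the virtual store rejects writes
    texts.check()  # the consistency check still succeeds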
@@ -2757 +2790 @@
     def test_get_sha1s(self):
         self._lines[b"key"] = [b"dataline1", b"dataline2"]
         self.assertEqual({(b"key",): osutils.sha_strings(self._lines[b"key"])},
-                           self.texts.get_sha1s([(b"key",)]))
+                         self.texts.get_sha1s([(b"key",)]))
 
     def test_get_parent_map(self):
         self._parent_map = {b"G": (b"A", b"B")}
         self.assertEqual({(b"G",): ((b"A",), (b"B",))},
-                          self.texts.get_parent_map([(b"G",), (b"L",)]))
+                         self.texts.get_parent_map([(b"G",), (b"L",)]))
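Two details are being asserted here: get_parent_map normalises each parent into a key tuple, and absent keys such as (b"L",) are simply omitted from the result rather than raising. Equivalently, with `texts` standing in for self.texts:

    parent_map = texts.get_parent_map([(b"G",), (b"L",)])
    # b"G" is present with parents A and B; (b"L",) is silently dropped.
    assert parent_map == {(b"G",): ((b"A",), (b"B",))}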
 
     def test_get_record_stream(self):
         self._lines[b"A"] = [b"FOO", b"BAR"]
@@ -2783 +2816 @@
         self._lines[b"C"] = [b"Alberta"]
         it = self.texts.iter_lines_added_or_present_in_keys([(b"A",), (b"B",)])
         self.assertEqual(sorted([(b"FOO", b"A"), (b"BAR", b"A"), (b"HEY", b"B")]),
-            sorted(list(it)))
+                         sorted(list(it)))
 
 
 class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):
@@ -2809 +2842 @@
         self.assertEqual([], vf.calls)
 
     def test_get_record_stream_topological(self):
-        vf = self.get_ordering_vf({(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
+        vf = self.get_ordering_vf(
+            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
         request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
         keys = [r.key for r in vf.get_record_stream(request_keys,
-                                    'topological', False)]
+                                                    'topological', False)]
         # We should have gotten the keys in topological order
         self.assertEqual([(b'A',), (b'B',), (b'C',), (b'D',)], keys)
         # And recorded that the request was made
         self.assertEqual([('get_record_stream', request_keys, 'topological',
                            False)], vf.calls)
 
     def test_get_record_stream_ordered(self):
-        vf = self.get_ordering_vf({(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
+        vf = self.get_ordering_vf(
+            {(b'A',): 3, (b'B',): 2, (b'C',): 4, (b'D',): 1})
         request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
         keys = [r.key for r in vf.get_record_stream(request_keys,
-                                   'unordered', False)]
+                                                    'unordered', False)]
         # They should be returned based on their priority
         self.assertEqual([(b'D',), (b'B',), (b'A',), (b'C',)], keys)
         # And the request recorded
@@ -2834 +2869 @@
         vf = self.get_ordering_vf({(b'B',): 2, (b'D',): 1})
         request_keys = [(b'B',), (b'C',), (b'D',), (b'A',)]
         keys = [r.key for r in vf.get_record_stream(request_keys,
-                                   'unordered', False)]
+                                                    'unordered', False)]
         # A and C are not in the map, so they get sorted to the front. A comes
         # before C alphabetically, so it comes back first
         self.assertEqual([(b'A',), (b'C',), (b'D',), (b'B',)], keys)
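Taken together, these assertions fix the decorator's 'unordered' policy: keys missing from the priority map sort to the front alphabetically, and known keys follow in ascending priority. One way to reproduce the expected ordering (an illustration, not the decorator's implementation):

    def expected_order(request_keys, priorities):
        # Unknown keys first (False sorts before True), then by
        # ascending priority, then by key as the tie-breaker.
        return sorted(request_keys,
                      key=lambda k: (k in priorities,
                                     priorities.get(k, 0), k))

    assert expected_order([(b'B',), (b'C',), (b'D',), (b'A',)],
                          {(b'B',): 2, (b'D',): 1}) == [
        (b'A',), (b'C',), (b'D',), (b'B',)]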