462
357
writer = pack.ContainerWriter(write_data)
464
359
access.set_writer(writer, index, (transport, packname))
465
memos = access.add_raw_records([('key', 10)], '1234567890')
360
memos = access.add_raw_records([10], '1234567890')
467
362
self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))
469
def test_missing_index_raises_retry(self):
470
memos = self.make_pack_file()
471
transport = self.get_transport()
472
reload_called, reload_func = self.make_reload_func()
473
# Note that the index key has changed from 'foo' to 'bar'
474
access = _DirectPackAccess({'bar':(transport, 'packname')},
475
reload_func=reload_func)
476
e = self.assertListRaises(errors.RetryWithNewPacks,
477
access.get_raw_records, memos)
478
# Because a key was passed in which does not match our index list, we
479
# assume that the listing was already reloaded
480
self.assertTrue(e.reload_occurred)
481
self.assertIsInstance(e.exc_info, tuple)
482
self.assertIs(e.exc_info[0], KeyError)
483
self.assertIsInstance(e.exc_info[1], KeyError)
485
def test_missing_index_raises_key_error_with_no_reload(self):
486
memos = self.make_pack_file()
487
transport = self.get_transport()
488
# Note that the index key has changed from 'foo' to 'bar'
489
access = _DirectPackAccess({'bar':(transport, 'packname')})
490
e = self.assertListRaises(KeyError, access.get_raw_records, memos)
492
def test_missing_file_raises_retry(self):
493
memos = self.make_pack_file()
494
transport = self.get_transport()
495
reload_called, reload_func = self.make_reload_func()
496
# Note that the 'filename' has been changed to 'different-packname'
497
access = _DirectPackAccess({'foo':(transport, 'different-packname')},
498
reload_func=reload_func)
499
e = self.assertListRaises(errors.RetryWithNewPacks,
500
access.get_raw_records, memos)
501
# The file has gone missing, so we assume we need to reload
502
self.assertFalse(e.reload_occurred)
503
self.assertIsInstance(e.exc_info, tuple)
504
self.assertIs(e.exc_info[0], errors.NoSuchFile)
505
self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
506
self.assertEqual('different-packname', e.exc_info[1].path)
508
def test_missing_file_raises_no_such_file_with_no_reload(self):
509
memos = self.make_pack_file()
510
transport = self.get_transport()
511
# Note that the 'filename' has been changed to 'different-packname'
512
access = _DirectPackAccess({'foo':(transport, 'different-packname')})
513
e = self.assertListRaises(errors.NoSuchFile,
514
access.get_raw_records, memos)
516
def test_failing_readv_raises_retry(self):
517
memos = self.make_pack_file()
518
transport = self.get_transport()
519
failing_transport = MockReadvFailingTransport(
520
[transport.get_bytes('packname')])
521
reload_called, reload_func = self.make_reload_func()
522
access = _DirectPackAccess({'foo':(failing_transport, 'packname')},
523
reload_func=reload_func)
524
# Asking for a single record will not trigger the Mock failure
525
self.assertEqual(['1234567890'],
526
list(access.get_raw_records(memos[:1])))
527
self.assertEqual(['12345'],
528
list(access.get_raw_records(memos[1:2])))
529
# A multiple offset readv() will fail mid-way through
530
e = self.assertListRaises(errors.RetryWithNewPacks,
531
access.get_raw_records, memos)
532
# The file has gone missing, so we assume we need to reload
533
self.assertFalse(e.reload_occurred)
534
self.assertIsInstance(e.exc_info, tuple)
535
self.assertIs(e.exc_info[0], errors.NoSuchFile)
536
self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
537
self.assertEqual('packname', e.exc_info[1].path)
539
def test_failing_readv_raises_no_such_file_with_no_reload(self):
540
memos = self.make_pack_file()
541
transport = self.get_transport()
542
failing_transport = MockReadvFailingTransport(
543
[transport.get_bytes('packname')])
544
reload_called, reload_func = self.make_reload_func()
545
access = _DirectPackAccess({'foo':(failing_transport, 'packname')})
546
# Asking for a single record will not trigger the Mock failure
547
self.assertEqual(['1234567890'],
548
list(access.get_raw_records(memos[:1])))
549
self.assertEqual(['12345'],
550
list(access.get_raw_records(memos[1:2])))
551
# A multiple offset readv() will fail mid-way through
552
e = self.assertListRaises(errors.NoSuchFile,
553
access.get_raw_records, memos)
555
def test_reload_or_raise_no_reload(self):
556
access = _DirectPackAccess({}, reload_func=None)
557
retry_exc = self.make_retry_exception()
558
# Without a reload_func, we will just re-raise the original exception
559
self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
561
def test_reload_or_raise_reload_changed(self):
562
reload_called, reload_func = self.make_reload_func(return_val=True)
563
access = _DirectPackAccess({}, reload_func=reload_func)
564
retry_exc = self.make_retry_exception()
565
access.reload_or_raise(retry_exc)
566
self.assertEqual([1], reload_called)
567
retry_exc.reload_occurred=True
568
access.reload_or_raise(retry_exc)
569
self.assertEqual([2], reload_called)
571
def test_reload_or_raise_reload_no_change(self):
572
reload_called, reload_func = self.make_reload_func(return_val=False)
573
access = _DirectPackAccess({}, reload_func=reload_func)
574
retry_exc = self.make_retry_exception()
575
# If reload_occurred is False, then we consider it an error to have
576
# reload_func() return False (no changes).
577
self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
578
self.assertEqual([1], reload_called)
579
retry_exc.reload_occurred=True
580
# If reload_occurred is True, then we assume nothing changed because
581
# it had changed earlier, but didn't change again
582
access.reload_or_raise(retry_exc)
583
self.assertEqual([2], reload_called)
585
def test_annotate_retries(self):
586
vf, reload_counter = self.make_vf_for_retrying()
587
# It is a little bit bogus to annotate the Revision VF, but it works,
588
# as we have ancestry stored there
590
reload_lines = vf.annotate(key)
591
self.assertEqual([1, 1, 0], reload_counter)
592
plain_lines = vf.annotate(key)
593
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
594
if reload_lines != plain_lines:
595
self.fail('Annotation was not identical with reloading.')
596
# Now delete the packs-in-use, which should trigger another reload, but
597
# this time we just raise an exception because we can't recover
598
for trans, name in vf._access._indices.itervalues():
600
self.assertRaises(errors.NoSuchFile, vf.annotate, key)
601
self.assertEqual([2, 1, 1], reload_counter)
603
def test__get_record_map_retries(self):
604
vf, reload_counter = self.make_vf_for_retrying()
605
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
606
records = vf._get_record_map(keys)
607
self.assertEqual(keys, sorted(records.keys()))
608
self.assertEqual([1, 1, 0], reload_counter)
609
# Now delete the packs-in-use, which should trigger another reload, but
610
# this time we just raise an exception because we can't recover
611
for trans, name in vf._access._indices.itervalues():
613
self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
614
self.assertEqual([2, 1, 1], reload_counter)
616
def test_get_record_stream_retries(self):
617
vf, reload_counter = self.make_vf_for_retrying()
618
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
619
record_stream = vf.get_record_stream(keys, 'topological', False)
620
record = record_stream.next()
621
self.assertEqual(('rev-1',), record.key)
622
self.assertEqual([0, 0, 0], reload_counter)
623
record = record_stream.next()
624
self.assertEqual(('rev-2',), record.key)
625
self.assertEqual([1, 1, 0], reload_counter)
626
record = record_stream.next()
627
self.assertEqual(('rev-3',), record.key)
628
self.assertEqual([1, 1, 0], reload_counter)
629
# Now delete all pack files, and see that we raise the right error
630
for trans, name in vf._access._indices.itervalues():
632
self.assertListRaises(errors.NoSuchFile,
633
vf.get_record_stream, keys, 'topological', False)
635
def test_iter_lines_added_or_present_in_keys_retries(self):
636
vf, reload_counter = self.make_vf_for_retrying()
637
keys = [('rev-1',), ('rev-2',), ('rev-3',)]
638
# Unfortunately, iter_lines_added_or_present_in_keys iterates the
639
# result in random order (determined by the iteration order from a
640
# set()), so we don't have any solid way to trigger whether data is
641
# read before or after. However we tried to delete the middle node to
642
# exercise the code well.
643
# What we care about is that all lines are always yielded, but not
646
reload_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
647
self.assertEqual([1, 1, 0], reload_counter)
648
# Now do it again, to make sure the result is equivalent
649
plain_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
650
self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
651
self.assertEqual(plain_lines, reload_lines)
652
self.assertEqual(21, len(plain_lines))
653
# Now delete all pack files, and see that we raise the right error
654
for trans, name in vf._access._indices.itervalues():
656
self.assertListRaises(errors.NoSuchFile,
657
vf.iter_lines_added_or_present_in_keys, keys)
658
self.assertEqual([2, 1, 1], reload_counter)
660
def test_get_record_stream_yields_disk_sorted_order(self):
661
# if we get 'unordered' pick a semi-optimal order for reading. The
662
# order should be grouped by pack file, and then by position in file
663
repo = self.make_repository('test', format='pack-0.92')
665
self.addCleanup(repo.unlock)
666
repo.start_write_group()
668
vf.add_lines(('f-id', 'rev-5'), [('f-id', 'rev-4')], ['lines\n'])
669
vf.add_lines(('f-id', 'rev-1'), [], ['lines\n'])
670
vf.add_lines(('f-id', 'rev-2'), [('f-id', 'rev-1')], ['lines\n'])
671
repo.commit_write_group()
672
# We inserted them as rev-5, rev-1, rev-2, we should get them back in
674
stream = vf.get_record_stream([('f-id', 'rev-1'), ('f-id', 'rev-5'),
675
('f-id', 'rev-2')], 'unordered', False)
676
keys = [r.key for r in stream]
677
self.assertEqual([('f-id', 'rev-5'), ('f-id', 'rev-1'),
678
('f-id', 'rev-2')], keys)
679
repo.start_write_group()
680
vf.add_lines(('f-id', 'rev-4'), [('f-id', 'rev-3')], ['lines\n'])
681
vf.add_lines(('f-id', 'rev-3'), [('f-id', 'rev-2')], ['lines\n'])
682
vf.add_lines(('f-id', 'rev-6'), [('f-id', 'rev-5')], ['lines\n'])
683
repo.commit_write_group()
684
# Request in random order, to make sure the output order isn't based on
686
request_keys = set(('f-id', 'rev-%d' % i) for i in range(1, 7))
687
stream = vf.get_record_stream(request_keys, 'unordered', False)
688
keys = [r.key for r in stream]
689
# We want to get the keys back in disk order, but it doesn't matter
690
# which pack we read from first. So this can come back in 2 orders
691
alt1 = [('f-id', 'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
692
alt2 = [('f-id', 'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
693
if keys != alt1 and keys != alt2:
694
self.fail('Returned key order did not match either expected order.'
695
' expected %s or %s, not %s'
696
% (alt1, alt2, keys))
699
365
class LowLevelKnitDataTests(TestCase):
848
471
# Change 2 bytes in the middle to \xff
849
472
gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
850
473
transport = MockTransport([gz_txt])
851
access = _KnitKeyAccess(transport, ConstantMapper('filename'))
852
knit = KnitVersionedFiles(None, access)
853
records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
854
self.assertRaises(errors.KnitCorrupt, list,
855
knit._read_records_iter(records))
856
# read_records_iter_raw will barf on bad gz data
857
self.assertRaises(errors.KnitCorrupt, list,
858
knit._read_records_iter_raw(records))
474
access = _KnitAccess(transport, 'filename', None, None, False, False)
475
data = _KnitData(access=access)
476
records = [('rev-id-1', (None, 0, len(gz_txt)))]
478
self.assertRaises(errors.KnitCorrupt, data.read_records, records)
480
# read_records_iter_raw will notice if we request the wrong version.
481
self.assertRaises(errors.KnitCorrupt, list,
482
data.read_records_iter_raw(records))
861
485
class LowLevelKnitIndexTests(TestCase):
863
def get_knit_index(self, transport, name, mode):
864
mapper = ConstantMapper(name)
487
def get_knit_index(self, *args, **kwargs):
488
orig = knit._load_data
490
knit._load_data = orig
491
self.addCleanup(reset)
865
492
from bzrlib._knit_load_data_py import _load_data_py
866
self.overrideAttr(knit, '_load_data', _load_data_py)
867
allow_writes = lambda: 'w' in mode
868
return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
493
knit._load_data = _load_data_py
494
return _KnitIndex(*args, **kwargs)
496
def test_no_such_file(self):
497
transport = MockTransport()
499
self.assertRaises(NoSuchFile, self.get_knit_index,
500
transport, "filename", "r")
501
self.assertRaises(NoSuchFile, self.get_knit_index,
502
transport, "filename", "w", create=False)
870
504
def test_create_file(self):
871
505
transport = MockTransport()
872
index = self.get_knit_index(transport, "filename", "w")
874
call = transport.calls.pop(0)
875
# call[1][1] is a StringIO - we can't test it by simple equality.
876
self.assertEqual('put_file_non_atomic', call[0])
877
self.assertEqual('filename.kndx', call[1][0])
878
# With no history, _KndxIndex writes a new index:
879
self.assertEqual(_KndxIndex.HEADER,
880
call[1][1].getvalue())
881
self.assertEqual({'create_parent_dir': True}, call[2])
507
index = self.get_knit_index(transport, "filename", "w",
508
file_mode="wb", create=True)
510
("put_bytes_non_atomic",
511
("filename", index.HEADER), {"mode": "wb"}),
512
transport.calls.pop(0))
514
def test_delay_create_file(self):
515
transport = MockTransport()
517
index = self.get_knit_index(transport, "filename", "w",
518
create=True, file_mode="wb", create_parent_dir=True,
519
delay_create=True, dir_mode=0777)
520
self.assertEqual([], transport.calls)
522
index.add_versions([])
523
name, (filename, f), kwargs = transport.calls.pop(0)
524
self.assertEqual("put_file_non_atomic", name)
526
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
528
self.assertEqual("filename", filename)
529
self.assertEqual(index.HEADER, f.read())
531
index.add_versions([])
532
self.assertEqual(("append_bytes", ("filename", ""), {}),
533
transport.calls.pop(0))
883
535
def test_read_utf8_version_id(self):
884
536
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
885
537
utf8_revision_id = unicode_revision_id.encode('utf-8')
886
538
transport = MockTransport([
888
540
'%s option 0 1 :' % (utf8_revision_id,)
890
542
index = self.get_knit_index(transport, "filename", "r")
891
# _KndxIndex is a private class, and deals in utf8 revision_ids, not
543
# _KnitIndex is a private class, and deals in utf8 revision_ids, not
892
544
# Unicode revision_ids.
893
self.assertEqual({(utf8_revision_id,):()},
894
index.get_parent_map(index.keys()))
895
self.assertFalse((unicode_revision_id,) in index.keys())
545
self.assertTrue(index.has_version(utf8_revision_id))
546
self.assertFalse(index.has_version(unicode_revision_id))
897
548
def test_read_utf8_parents(self):
898
549
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
899
550
utf8_revision_id = unicode_revision_id.encode('utf-8')
900
551
transport = MockTransport([
902
553
"version option 0 1 .%s :" % (utf8_revision_id,)
904
555
index = self.get_knit_index(transport, "filename", "r")
905
self.assertEqual({("version",):((utf8_revision_id,),)},
906
index.get_parent_map(index.keys()))
556
self.assertEqual([utf8_revision_id],
557
index.get_parents_with_ghosts("version"))
908
559
def test_read_ignore_corrupted_lines(self):
909
560
transport = MockTransport([
912
563
"corrupted options 0 1 .b .c ",
913
564
"version options 0 1 :"
915
566
index = self.get_knit_index(transport, "filename", "r")
916
self.assertEqual(1, len(index.keys()))
917
self.assertEqual(set([("version",)]), index.keys())
567
self.assertEqual(1, index.num_versions())
568
self.assertTrue(index.has_version("version"))
919
570
def test_read_corrupted_header(self):
920
571
transport = MockTransport(['not a bzr knit index header\n'])
921
index = self.get_knit_index(transport, "filename", "r")
922
self.assertRaises(KnitHeaderError, index.keys)
572
self.assertRaises(KnitHeaderError,
573
self.get_knit_index, transport, "filename", "r")
924
575
def test_read_duplicate_entries(self):
925
576
transport = MockTransport([
927
578
"parent options 0 1 :",
928
579
"version options1 0 1 0 :",
929
580
"version options2 1 2 .other :",
930
581
"version options3 3 4 0 .other :"
932
583
index = self.get_knit_index(transport, "filename", "r")
933
self.assertEqual(2, len(index.keys()))
584
self.assertEqual(2, index.num_versions())
934
585
# check that the index used is the first one written. (Specific
935
586
# to KnitIndex style indices.
936
self.assertEqual("1", index._dictionary_compress([("version",)]))
937
self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
938
self.assertEqual(["options3"], index.get_options(("version",)))
939
self.assertEqual({("version",):(("parent",), ("other",))},
940
index.get_parent_map([("version",)]))
587
self.assertEqual("1", index._version_list_to_index(["version"]))
588
self.assertEqual((None, 3, 4), index.get_position("version"))
589
self.assertEqual(["options3"], index.get_options("version"))
590
self.assertEqual(["parent", "other"],
591
index.get_parents_with_ghosts("version"))
942
593
def test_read_compressed_parents(self):
943
594
transport = MockTransport([
945
596
"a option 0 1 :",
946
597
"b option 0 1 0 :",
947
598
"c option 0 1 1 0 :",
949
600
index = self.get_knit_index(transport, "filename", "r")
950
self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
951
index.get_parent_map([("b",), ("c",)]))
601
self.assertEqual(["a"], index.get_parents("b"))
602
self.assertEqual(["b", "a"], index.get_parents("c"))
953
604
def test_write_utf8_version_id(self):
954
605
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
955
606
utf8_revision_id = unicode_revision_id.encode('utf-8')
956
607
transport = MockTransport([
959
610
index = self.get_knit_index(transport, "filename", "r")
961
((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
962
call = transport.calls.pop(0)
963
# call[1][1] is a StringIO - we can't test it by simple equality.
964
self.assertEqual('put_file_non_atomic', call[0])
965
self.assertEqual('filename.kndx', call[1][0])
966
# With no history, _KndxIndex writes a new index:
967
self.assertEqual(_KndxIndex.HEADER +
968
"\n%s option 0 1 :" % (utf8_revision_id,),
969
call[1][1].getvalue())
970
self.assertEqual({'create_parent_dir': True}, call[2])
611
index.add_version(utf8_revision_id, ["option"], (None, 0, 1), [])
612
self.assertEqual(("append_bytes", ("filename",
613
"\n%s option 0 1 :" % (utf8_revision_id,)),
615
transport.calls.pop(0))
972
617
def test_write_utf8_parents(self):
973
618
unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
974
619
utf8_revision_id = unicode_revision_id.encode('utf-8')
975
620
transport = MockTransport([
978
index = self.get_knit_index(transport, "filename", "r")
980
(("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
981
call = transport.calls.pop(0)
982
# call[1][1] is a StringIO - we can't test it by simple equality.
983
self.assertEqual('put_file_non_atomic', call[0])
984
self.assertEqual('filename.kndx', call[1][0])
985
# With no history, _KndxIndex writes a new index:
986
self.assertEqual(_KndxIndex.HEADER +
987
"\nversion option 0 1 .%s :" % (utf8_revision_id,),
988
call[1][1].getvalue())
989
self.assertEqual({'create_parent_dir': True}, call[2])
992
transport = MockTransport([
995
index = self.get_knit_index(transport, "filename", "r")
997
self.assertEqual(set(), index.keys())
999
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
1000
self.assertEqual(set([("a",)]), index.keys())
1002
index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
1003
self.assertEqual(set([("a",)]), index.keys())
1005
index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
1006
self.assertEqual(set([("a",), ("b",)]), index.keys())
1008
def add_a_b(self, index, random_id=None):
1010
if random_id is not None:
1011
kwargs["random_id"] = random_id
1013
(("a",), ["option"], (("a",), 0, 1), [("b",)]),
1014
(("a",), ["opt"], (("a",), 1, 2), [("c",)]),
1015
(("b",), ["option"], (("b",), 2, 3), [("a",)])
1018
def assertIndexIsAB(self, index):
1023
index.get_parent_map(index.keys()))
1024
self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
1025
self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
1026
self.assertEqual(["opt"], index.get_options(("a",)))
623
index = self.get_knit_index(transport, "filename", "r")
624
index.add_version("version", ["option"], (None, 0, 1), [utf8_revision_id])
625
self.assertEqual(("append_bytes", ("filename",
626
"\nversion option 0 1 .%s :" % (utf8_revision_id,)),
628
transport.calls.pop(0))
630
def test_get_graph(self):
631
transport = MockTransport()
632
index = self.get_knit_index(transport, "filename", "w", create=True)
633
self.assertEqual([], index.get_graph())
635
index.add_version("a", ["option"], (None, 0, 1), ["b"])
636
self.assertEqual([("a", ["b"])], index.get_graph())
638
index.add_version("c", ["option"], (None, 0, 1), ["d"])
639
self.assertEqual([("a", ["b"]), ("c", ["d"])],
640
sorted(index.get_graph()))
642
def test_get_ancestry(self):
643
transport = MockTransport([
646
"b option 0 1 0 .e :",
647
"c option 0 1 1 0 :",
648
"d option 0 1 2 .f :"
650
index = self.get_knit_index(transport, "filename", "r")
652
self.assertEqual([], index.get_ancestry([]))
653
self.assertEqual(["a"], index.get_ancestry(["a"]))
654
self.assertEqual(["a", "b"], index.get_ancestry(["b"]))
655
self.assertEqual(["a", "b", "c"], index.get_ancestry(["c"]))
656
self.assertEqual(["a", "b", "c", "d"], index.get_ancestry(["d"]))
657
self.assertEqual(["a", "b"], index.get_ancestry(["a", "b"]))
658
self.assertEqual(["a", "b", "c"], index.get_ancestry(["a", "c"]))
660
self.assertRaises(RevisionNotPresent, index.get_ancestry, ["e"])
662
def test_get_ancestry_with_ghosts(self):
663
transport = MockTransport([
666
"b option 0 1 0 .e :",
667
"c option 0 1 0 .f .g :",
668
"d option 0 1 2 .h .j .k :"
670
index = self.get_knit_index(transport, "filename", "r")
672
self.assertEqual([], index.get_ancestry_with_ghosts([]))
673
self.assertEqual(["a"], index.get_ancestry_with_ghosts(["a"]))
674
self.assertEqual(["a", "e", "b"],
675
index.get_ancestry_with_ghosts(["b"]))
676
self.assertEqual(["a", "g", "f", "c"],
677
index.get_ancestry_with_ghosts(["c"]))
678
self.assertEqual(["a", "g", "f", "c", "k", "j", "h", "d"],
679
index.get_ancestry_with_ghosts(["d"]))
680
self.assertEqual(["a", "e", "b"],
681
index.get_ancestry_with_ghosts(["a", "b"]))
682
self.assertEqual(["a", "g", "f", "c"],
683
index.get_ancestry_with_ghosts(["a", "c"]))
685
["a", "g", "f", "c", "e", "b", "k", "j", "h", "d"],
686
index.get_ancestry_with_ghosts(["b", "d"]))
688
self.assertRaises(RevisionNotPresent,
689
index.get_ancestry_with_ghosts, ["e"])
691
def test_iter_parents(self):
692
transport = MockTransport()
693
index = self.get_knit_index(transport, "filename", "w", create=True)
695
index.add_version('r0', ['option'], (None, 0, 1), [])
697
index.add_version('r1', ['option'], (None, 0, 1), ['r0'])
699
index.add_version('r2', ['option'], (None, 0, 1), ['r1', 'r0'])
701
# cases: each sample data individually:
702
self.assertEqual(set([('r0', ())]),
703
set(index.iter_parents(['r0'])))
704
self.assertEqual(set([('r1', ('r0', ))]),
705
set(index.iter_parents(['r1'])))
706
self.assertEqual(set([('r2', ('r1', 'r0'))]),
707
set(index.iter_parents(['r2'])))
708
# no nodes returned for a missing node
709
self.assertEqual(set(),
710
set(index.iter_parents(['missing'])))
711
# 1 node returned with missing nodes skipped
712
self.assertEqual(set([('r1', ('r0', ))]),
713
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
715
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
716
set(index.iter_parents(['r0', 'r1'])))
717
# 2 nodes returned, missing skipped
718
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
719
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
721
def test_num_versions(self):
722
transport = MockTransport([
725
index = self.get_knit_index(transport, "filename", "r")
727
self.assertEqual(0, index.num_versions())
728
self.assertEqual(0, len(index))
730
index.add_version("a", ["option"], (None, 0, 1), [])
731
self.assertEqual(1, index.num_versions())
732
self.assertEqual(1, len(index))
734
index.add_version("a", ["option2"], (None, 1, 2), [])
735
self.assertEqual(1, index.num_versions())
736
self.assertEqual(1, len(index))
738
index.add_version("b", ["option"], (None, 0, 1), [])
739
self.assertEqual(2, index.num_versions())
740
self.assertEqual(2, len(index))
742
def test_get_versions(self):
743
transport = MockTransport([
746
index = self.get_knit_index(transport, "filename", "r")
748
self.assertEqual([], index.get_versions())
750
index.add_version("a", ["option"], (None, 0, 1), [])
751
self.assertEqual(["a"], index.get_versions())
753
index.add_version("a", ["option"], (None, 0, 1), [])
754
self.assertEqual(["a"], index.get_versions())
756
index.add_version("b", ["option"], (None, 0, 1), [])
757
self.assertEqual(["a", "b"], index.get_versions())
759
def test_add_version(self):
760
transport = MockTransport([
763
index = self.get_knit_index(transport, "filename", "r")
765
index.add_version("a", ["option"], (None, 0, 1), ["b"])
766
self.assertEqual(("append_bytes",
767
("filename", "\na option 0 1 .b :"),
768
{}), transport.calls.pop(0))
769
self.assertTrue(index.has_version("a"))
770
self.assertEqual(1, index.num_versions())
771
self.assertEqual((None, 0, 1), index.get_position("a"))
772
self.assertEqual(["option"], index.get_options("a"))
773
self.assertEqual(["b"], index.get_parents_with_ghosts("a"))
775
index.add_version("a", ["opt"], (None, 1, 2), ["c"])
776
self.assertEqual(("append_bytes",
777
("filename", "\na opt 1 2 .c :"),
778
{}), transport.calls.pop(0))
779
self.assertTrue(index.has_version("a"))
780
self.assertEqual(1, index.num_versions())
781
self.assertEqual((None, 1, 2), index.get_position("a"))
782
self.assertEqual(["opt"], index.get_options("a"))
783
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
785
index.add_version("b", ["option"], (None, 2, 3), ["a"])
786
self.assertEqual(("append_bytes",
787
("filename", "\nb option 2 3 0 :"),
788
{}), transport.calls.pop(0))
789
self.assertTrue(index.has_version("b"))
790
self.assertEqual(2, index.num_versions())
791
self.assertEqual((None, 2, 3), index.get_position("b"))
792
self.assertEqual(["option"], index.get_options("b"))
793
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1028
795
def test_add_versions(self):
1029
796
transport = MockTransport([
1032
799
index = self.get_knit_index(transport, "filename", "r")
1035
call = transport.calls.pop(0)
1036
# call[1][1] is a StringIO - we can't test it by simple equality.
1037
self.assertEqual('put_file_non_atomic', call[0])
1038
self.assertEqual('filename.kndx', call[1][0])
1039
# With no history, _KndxIndex writes a new index:
802
("a", ["option"], (None, 0, 1), ["b"]),
803
("a", ["opt"], (None, 1, 2), ["c"]),
804
("b", ["option"], (None, 2, 3), ["a"])
806
self.assertEqual(("append_bytes", ("filename",
1042
807
"\na option 0 1 .b :"
1043
808
"\na opt 1 2 .c :"
1044
"\nb option 2 3 0 :",
1045
call[1][1].getvalue())
1046
self.assertEqual({'create_parent_dir': True}, call[2])
1047
self.assertIndexIsAB(index)
810
), {}), transport.calls.pop(0))
811
self.assertTrue(index.has_version("a"))
812
self.assertTrue(index.has_version("b"))
813
self.assertEqual(2, index.num_versions())
814
self.assertEqual((None, 1, 2), index.get_position("a"))
815
self.assertEqual((None, 2, 3), index.get_position("b"))
816
self.assertEqual(["opt"], index.get_options("a"))
817
self.assertEqual(["option"], index.get_options("b"))
818
self.assertEqual(["c"], index.get_parents_with_ghosts("a"))
819
self.assertEqual(["a"], index.get_parents_with_ghosts("b"))
1049
821
def test_add_versions_random_id_is_accepted(self):
1050
822
transport = MockTransport([
1053
825
index = self.get_knit_index(transport, "filename", "r")
1054
self.add_a_b(index, random_id=True)
828
("a", ["option"], (None, 0, 1), ["b"]),
829
("a", ["opt"], (None, 1, 2), ["c"]),
830
("b", ["option"], (None, 2, 3), ["a"])
1056
833
def test_delay_create_and_add_versions(self):
1057
834
transport = MockTransport()
1059
index = self.get_knit_index(transport, "filename", "w")
836
index = self.get_knit_index(transport, "filename", "w",
837
create=True, file_mode="wb", create_parent_dir=True,
838
delay_create=True, dir_mode=0777)
1061
839
self.assertEqual([], transport.calls)
1064
#[ {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
1066
# Two calls: one during which we load the existing index (and when its
1067
# missing create it), then a second where we write the contents out.
1068
self.assertEqual(2, len(transport.calls))
1069
call = transport.calls.pop(0)
1070
self.assertEqual('put_file_non_atomic', call[0])
1071
self.assertEqual('filename.kndx', call[1][0])
1072
# With no history, _KndxIndex writes a new index:
1073
self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
1074
self.assertEqual({'create_parent_dir': True}, call[2])
1075
call = transport.calls.pop(0)
1076
# call[1][1] is a StringIO - we can't test it by simple equality.
1077
self.assertEqual('put_file_non_atomic', call[0])
1078
self.assertEqual('filename.kndx', call[1][0])
1079
# With no history, _KndxIndex writes a new index:
842
("a", ["option"], (None, 0, 1), ["b"]),
843
("a", ["opt"], (None, 1, 2), ["c"]),
844
("b", ["option"], (None, 2, 3), ["a"])
846
name, (filename, f), kwargs = transport.calls.pop(0)
847
self.assertEqual("put_file_non_atomic", name)
849
{"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
851
self.assertEqual("filename", filename)
1082
854
"\na option 0 1 .b :"
1083
855
"\na opt 1 2 .c :"
1084
856
"\nb option 2 3 0 :",
1085
call[1][1].getvalue())
1086
self.assertEqual({'create_parent_dir': True}, call[2])
1088
def assertTotalBuildSize(self, size, keys, positions):
1089
self.assertEqual(size,
1090
knit._get_total_build_size(None, keys, positions))
1092
def test__get_total_build_size(self):
1094
('a',): (('fulltext', False), (('a',), 0, 100), None),
1095
('b',): (('line-delta', False), (('b',), 100, 21), ('a',)),
1096
('c',): (('line-delta', False), (('c',), 121, 35), ('b',)),
1097
('d',): (('line-delta', False), (('d',), 156, 12), ('b',)),
1099
self.assertTotalBuildSize(100, [('a',)], positions)
1100
self.assertTotalBuildSize(121, [('b',)], positions)
1101
# c needs both a & b
1102
self.assertTotalBuildSize(156, [('c',)], positions)
1103
# we shouldn't count 'b' twice
1104
self.assertTotalBuildSize(156, [('b',), ('c',)], positions)
1105
self.assertTotalBuildSize(133, [('d',)], positions)
1106
self.assertTotalBuildSize(168, [('c',), ('d',)], positions)
859
def test_has_version(self):
860
transport = MockTransport([
864
index = self.get_knit_index(transport, "filename", "r")
866
self.assertTrue(index.has_version("a"))
867
self.assertFalse(index.has_version("b"))
1108
869
def test_get_position(self):
1109
870
transport = MockTransport([
1111
872
"a option 0 1 :",
1112
873
"b option 1 2 :"
1114
875
index = self.get_knit_index(transport, "filename", "r")
1116
self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
1117
self.assertEqual((("b",), 1, 2), index.get_position(("b",)))
877
self.assertEqual((None, 0, 1), index.get_position("a"))
878
self.assertEqual((None, 1, 2), index.get_position("b"))
1119
880
def test_get_method(self):
1120
881
transport = MockTransport([
1122
883
"a fulltext,unknown 0 1 :",
1123
884
"b unknown,line-delta 1 2 :",
1254
def test_scan_unvalidated_index_not_implemented(self):
1255
transport = MockTransport()
1256
index = self.get_knit_index(transport, 'filename', 'r')
1258
NotImplementedError, index.scan_unvalidated_index,
1259
'dummy graph_index')
1261
NotImplementedError, index.get_missing_compression_parents)
1263
1044
def test_short_line(self):
    """An unterminated index line is ignored when loading the index."""
    transport = MockTransport([
        _KndxIndex.HEADER,
        "a option 0 10 :",
        "b option 10 10 0", # This line isn't terminated, ignored
        ])
    index = self.get_knit_index(transport, "filename", "r")
    # Only the properly terminated record survives.
    self.assertEqual(set([('a',)]), index.keys())
1272
1053
def test_skip_incomplete_record(self):
    """A record with bogus (unterminated) data is skipped, later ones kept."""
    transport = MockTransport([
        _KndxIndex.HEADER,
        "a option 0 10 :",
        "b option 10 10 0", # This line isn't terminated, ignored
        "c option 20 10 0 :", # Properly terminated, and starts with '\n'
        ])
    index = self.get_knit_index(transport, "filename", "r")
    # 'b' is dropped; 'a' and 'c' remain.
    self.assertEqual(set([('a',), ('c',)]), index.keys())
1283
1064
def test_trailing_characters(self):
    """A record line with trailing junk after the terminator is skipped."""
    transport = MockTransport([
        _KndxIndex.HEADER,
        "a option 0 10 :",
        "b option 10 10 0 :a", # This line has extra trailing characters
        "c option 20 10 0 :", # Properly terminated, and starts with '\n'
        ])
    index = self.get_knit_index(transport, "filename", "r")
    # 'b' is dropped; 'a' and 'c' remain.
    self.assertEqual(set([('a',), ('c',)]), index.keys())
1295
1076
class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
    """Re-run the low-level index tests against the compiled loader."""

    _test_needs_features = [compiled_knit_feature]

    def get_knit_index(self, transport, name, mode):
        """Build a _KndxIndex that parses via the C extension loader."""
        mapper = ConstantMapper(name)
        from bzrlib._knit_load_data_pyx import _load_data_c
        # Swap in the compiled parser for the duration of the test.
        self.overrideAttr(knit, '_load_data', _load_data_c)
        allow_writes = lambda: mode == 'w'
        return _KndxIndex(transport, mapper, lambda: None,
                          allow_writes, lambda: True)
1308
class Test_KnitAnnotator(TestCaseWithMemoryTransport):
1310
def make_annotator(self):
    """Return a _KnitAnnotator over a fresh pack-backed versioned file."""
    factory = knit.make_pack_factory(True, True, 1)
    vf = factory(self.get_transport())
    return knit._KnitAnnotator(vf)
1315
def test__expand_fulltext(self):
    """Expanding a fulltext record caches content for pending children."""
    ann = self.make_annotator()
    rev_key = ('rev-id',)
    ann._num_compression_children[rev_key] = 1
    res = ann._expand_record(rev_key, (('parent-id',),), None,
                             ['line1\n', 'line2\n'], ('fulltext', True))
    # The content object and text lines should be cached appropriately
    self.assertEqual(['line1\n', 'line2'], res)
    content_obj = ann._content_objects[rev_key]
    self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
    self.assertEqual(res, content_obj.text())
    self.assertEqual(res, ann._text_cache[rev_key])
1328
def test__expand_delta_comp_parent_not_available(self):
    # Parent isn't available yet, so we return nothing, but queue up this
    # node for later processing
    ann = self.make_annotator()
    rev_key = ('rev-id',)
    parent_key = ('parent-id',)
    record = ['0,1,1\n', 'new-line\n']
    details = ('line-delta', False)
    # NOTE(review): the call's continuation line was displaced by the
    # corrupted-diff residue; `record, details` is reconstructed from the
    # assertion on pending[0] below.
    res = ann._expand_record(rev_key, (parent_key,), parent_key,
                             record, details)
    self.assertEqual(None, res)
    self.assertTrue(parent_key in ann._pending_deltas)
    pending = ann._pending_deltas[parent_key]
    self.assertEqual(1, len(pending))
    self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
1344
def test__expand_record_tracks_num_children(self):
1345
ann = self.make_annotator()
1346
rev_key = ('rev-id',)
1347
rev2_key = ('rev2-id',)
1348
parent_key = ('parent-id',)
1349
record = ['0,1,1\n', 'new-line\n']
1350
details = ('line-delta', False)
1351
ann._num_compression_children[parent_key] = 2
1352
ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
1353
('fulltext', False))
1354
res = ann._expand_record(rev_key, (parent_key,), parent_key,
1356
self.assertEqual({parent_key: 1}, ann._num_compression_children)
1357
# Expanding the second child should remove the content object, and the
1358
# num_compression_children entry
1359
res = ann._expand_record(rev2_key, (parent_key,), parent_key,
1361
self.assertFalse(parent_key in ann._content_objects)
1362
self.assertEqual({}, ann._num_compression_children)
1363
# We should not cache the content_objects for rev2 and rev, because
1364
# they do not have compression children of their own.
1365
self.assertEqual({}, ann._content_objects)
1367
def test__expand_delta_records_blocks(self):
    """Expanding deltas records matching blocks for later annotation reuse."""
    ann = self.make_annotator()
    rev_key = ('rev-id',)
    parent_key = ('parent-id',)
    record = ['0,1,1\n', 'new-line\n']
    details = ('line-delta', True)
    ann._num_compression_children[parent_key] = 2
    ann._expand_record(parent_key, (), None,
                       ['line1\n', 'line2\n', 'line3\n'],
                       ('fulltext', False))
    ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
    self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
                     ann._matching_blocks)
    rev2_key = ('rev2-id',)
    record = ['0,1,1\n', 'new-line\n']
    details = ('line-delta', False)
    ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
    self.assertEqual([(1, 1, 2), (3, 3, 0)],
                     ann._matching_blocks[(rev2_key, parent_key)])
1387
def test__get_parent_ann_uses_matching_blocks(self):
    """Cached matching blocks are consumed instead of re-diffing parents."""
    ann = self.make_annotator()
    rev_key = ('rev-id',)
    parent_key = ('parent-id',)
    parent_ann = [(parent_key,)] * 3
    block_key = (rev_key, parent_key)
    ann._annotations_cache[parent_key] = parent_ann
    ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
    # We should not try to access any parent_lines content, because we know
    # we already have the matching blocks
    par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
        ['1\n', '2\n', '3\n'], parent_key)
    self.assertEqual(parent_ann, par_ann)
    self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
    # The cached entry is consumed (popped) on use.
    self.assertEqual({}, ann._matching_blocks)
1403
def test__process_pending(self):
1404
ann = self.make_annotator()
1405
rev_key = ('rev-id',)
1408
record = ['0,1,1\n', 'new-line\n']
1409
details = ('line-delta', False)
1410
p1_record = ['line1\n', 'line2\n']
1411
ann._num_compression_children[p1_key] = 1
1412
res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
1414
self.assertEqual(None, res)
1415
# self.assertTrue(p1_key in ann._pending_deltas)
1416
self.assertEqual({}, ann._pending_annotation)
1417
# Now insert p1, and we should be able to expand the delta
1418
res = ann._expand_record(p1_key, (), None, p1_record,
1419
('fulltext', False))
1420
self.assertEqual(p1_record, res)
1421
ann._annotations_cache[p1_key] = [(p1_key,)]*2
1422
res = ann._process_pending(p1_key)
1423
self.assertEqual([], res)
1424
self.assertFalse(p1_key in ann._pending_deltas)
1425
self.assertTrue(p2_key in ann._pending_annotation)
1426
self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
1427
ann._pending_annotation)
1428
# Now fill in parent 2, and pending annotation should be satisfied
1429
res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
1430
ann._annotations_cache[p2_key] = []
1431
res = ann._process_pending(p2_key)
1432
self.assertEqual([rev_key], res)
1433
self.assertEqual({}, ann._pending_annotation)
1434
self.assertEqual({}, ann._pending_deltas)
1436
def test_record_delta_removes_basis(self):
    """Seed a fulltext basis with an expected compression-child count."""
    ann = self.make_annotator()
    ann._expand_record(('parent-id',), (), None,
                       ['line1\n', 'line2\n'], ('fulltext', False))
    ann._num_compression_children['parent-id'] = 2
1442
def test_annotate_special_text(self):
1443
ann = self.make_annotator()
1445
rev1_key = ('rev-1',)
1446
rev2_key = ('rev-2',)
1447
rev3_key = ('rev-3',)
1448
spec_key = ('special:',)
1449
vf.add_lines(rev1_key, [], ['initial content\n'])
1450
vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
1453
vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
1456
spec_text = ('initial content\n'
1460
ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
1461
anns, lines = ann.annotate(spec_key)
1462
self.assertEqual([(rev1_key,),
1463
(rev2_key, rev3_key),
1467
self.assertEqualDiff(spec_text, ''.join(lines))
1078
_test_needs_features = [CompiledKnitFeature]
1080
def get_knit_index(self, *args, **kwargs):
1081
orig = knit._load_data
1083
knit._load_data = orig
1084
self.addCleanup(reset)
1085
from bzrlib._knit_load_data_c import _load_data_c
1086
knit._load_data = _load_data_c
1087
return _KnitIndex(*args, **kwargs)
1470
1091
class KnitTests(TestCaseWithTransport):
1471
1092
"""Class containing knit test helper routines."""
1473
def make_test_knit(self, annotate=False, name='test'):
    """Return a knit versioned file stored under *name* on this transport.

    :param annotate: If True, create an annotated knit.
    :param name: Base name for the knit's files on disk.
    """
    mapper = ConstantMapper(name)
    return make_file_factory(annotate, mapper)(self.get_transport())
1478
class TestBadShaError(KnitTests):
    """Tests for handling of sha errors."""

    def test_sha_exception_has_text(self):
        # having the failed text included in the error allows for recovery.
        source = self.make_test_knit()
        target = self.make_test_knit(name="target")
        if not source._max_delta_chain:
            raise TestNotApplicable(
                "cannot get delta-caused sha failures without deltas.")
        # NOTE(review): the `basis = ('basis',)` line was displaced by the
        # corrupted-diff residue and is reconstructed from later uses.
        basis = ('basis',)
        broken = ('broken',)
        source.add_lines(basis, (), ['foo\n'])
        source.add_lines(broken, (basis,), ['foo\n', 'bar\n'])
        # Seed target with a bad basis text
        target.add_lines(basis, (), ['gam\n'])
        target.insert_record_stream(
            source.get_record_stream([broken], 'unordered', False))
        err = self.assertRaises(errors.KnitCorrupt,
            target.get_record_stream([broken], 'unordered', True
            ).next().get_bytes_as, 'chunked')
        # Recovering the delta against the wrong basis yields this text.
        self.assertEqual(['gam\n', 'bar\n'], err.content)
        # Test for formatting with live data
        self.assertStartsWith(str(err), "Knit ")
1094
def make_test_knit(self, annotate=False, delay_create=False, index=None,
1097
factory = KnitPlainFactory()
1100
return KnitVersionedFile(name, get_transport('.'), access_mode='w',
1101
factory=factory, create=True,
1102
delay_create=delay_create, index=index)
1104
def assertRecordContentEqual(self, knit, version_id, candidate_content):
1105
"""Assert that some raw record content matches the raw record content
1106
for a particular version_id in the given knit.
1108
index_memo = knit._index.get_position(version_id)
1109
record = (version_id, index_memo)
1110
[(_, expected_content)] = list(knit._data.read_records_iter_raw([record]))
1111
self.assertEqual(expected_content, candidate_content)
1114
class BasicKnitTests(KnitTests):
1116
def add_stock_one_and_one_a(self, k):
    """Add the stock TEXT_1 and TEXT_1A versions to knit *k*."""
    k.add_lines('text-1', [], split_lines(TEXT_1))
    k.add_lines('text-1a', ['text-1'], split_lines(TEXT_1A))
1120
def test_knit_constructor(self):
    """Construct empty k"""
    self.make_test_knit()
1124
def test_make_explicit_index(self):
    """We can supply an index to use."""
    # Any object is accepted; the knit just stores it as its index.
    knit = KnitVersionedFile('test', get_transport('.'),
        index='strangelove')
    self.assertEqual(knit._index, 'strangelove')
1130
def test_knit_add(self):
    """Store one text in knit and retrieve"""
    k = self.make_test_knit()
    k.add_lines('text-1', [], split_lines(TEXT_1))
    self.assertTrue(k.has_version('text-1'))
    self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1137
def test_newline_empty_lines(self):
    # ensure that ["\n"] round trips ok.
    knit = self.make_test_knit()
    knit.add_lines('a', [], ["\n"])
    knit.add_lines_with_ghosts('b', [], ["\n"])
    self.assertEqual(["\n"], knit.get_lines('a'))
    self.assertEqual(["\n"], knit.get_lines('b'))
    # First versions are stored as fulltexts.
    self.assertEqual(['fulltext'], knit._index.get_options('a'))
    self.assertEqual(['fulltext'], knit._index.get_options('b'))
    knit.add_lines('c', ['a'], ["\n"])
    knit.add_lines_with_ghosts('d', ['b'], ["\n"])
    self.assertEqual(["\n"], knit.get_lines('c'))
    self.assertEqual(["\n"], knit.get_lines('d'))
    # Descendants are stored as line-deltas against their parents.
    self.assertEqual(['line-delta'], knit._index.get_options('c'))
    self.assertEqual(['line-delta'], knit._index.get_options('d'))
1153
def test_empty_lines(self):
    # bizarrely, [] is not listed as having no-eol.
    knit = self.make_test_knit()
    knit.add_lines('a', [], [])
    knit.add_lines_with_ghosts('b', [], [])
    self.assertEqual([], knit.get_lines('a'))
    self.assertEqual([], knit.get_lines('b'))
    self.assertEqual(['fulltext'], knit._index.get_options('a'))
    self.assertEqual(['fulltext'], knit._index.get_options('b'))
    knit.add_lines('c', ['a'], [])
    knit.add_lines_with_ghosts('d', ['b'], [])
    self.assertEqual([], knit.get_lines('c'))
    self.assertEqual([], knit.get_lines('d'))
    self.assertEqual(['line-delta'], knit._index.get_options('c'))
    self.assertEqual(['line-delta'], knit._index.get_options('d'))
1169
def test_knit_reload(self):
1170
# test that the content in a reloaded knit is correct
1171
k = self.make_test_knit()
1172
k.add_lines('text-1', [], split_lines(TEXT_1))
1174
k2 = KnitVersionedFile('test', get_transport('.'), access_mode='r', factory=KnitPlainFactory(), create=True)
1175
self.assertTrue(k2.has_version('text-1'))
1176
self.assertEqualDiff(''.join(k2.get_lines('text-1')), TEXT_1)
1178
def test_knit_several(self):
1179
"""Store several texts in a knit"""
1180
k = self.make_test_knit()
1181
k.add_lines('text-1', [], split_lines(TEXT_1))
1182
k.add_lines('text-2', [], split_lines(TEXT_2))
1183
self.assertEqualDiff(''.join(k.get_lines('text-1')), TEXT_1)
1184
self.assertEqualDiff(''.join(k.get_lines('text-2')), TEXT_2)
1186
def test_repeated_add(self):
    """Knit traps attempt to replace existing version"""
    k = self.make_test_knit()
    k.add_lines('text-1', [], split_lines(TEXT_1))
    # NOTE(review): the `k.add_lines,` continuation line was displaced by
    # the corrupted-diff residue and is reconstructed here.
    self.assertRaises(RevisionAlreadyPresent,
        k.add_lines,
        'text-1', [], split_lines(TEXT_1))
1194
def test_empty(self):
    """An empty text round-trips through an annotated knit."""
    k = self.make_test_knit(True)
    k.add_lines('text-1', [], [])
    self.assertEquals(k.get_lines('text-1'), [])
1199
def test_incomplete(self):
1200
"""Test if texts without a ending line-end can be inserted and
1202
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1203
k.add_lines('text-1', [], ['a\n', 'b' ])
1204
k.add_lines('text-2', ['text-1'], ['a\rb\n', 'b\n'])
1205
# reopening ensures maximum room for confusion
1206
k = KnitVersionedFile('test', get_transport('.'), delta=False, create=True)
1207
self.assertEquals(k.get_lines('text-1'), ['a\n', 'b' ])
1208
self.assertEquals(k.get_lines('text-2'), ['a\rb\n', 'b\n'])
1210
def test_delta(self):
1211
"""Expression of knit delta as lines"""
1212
k = self.make_test_knit()
1213
td = list(line_delta(TEXT_1.splitlines(True),
1214
TEXT_1A.splitlines(True)))
1215
self.assertEqualDiff(''.join(td), delta_1_1a)
1216
out = apply_line_delta(TEXT_1.splitlines(True), td)
1217
self.assertEqualDiff(''.join(out), TEXT_1A)
1219
def test_add_with_parents(self):
1220
"""Store in knit with parents"""
1221
k = self.make_test_knit()
1222
self.add_stock_one_and_one_a(k)
1223
self.assertEquals(k.get_parents('text-1'), [])
1224
self.assertEquals(k.get_parents('text-1a'), ['text-1'])
1226
def test_ancestry(self):
1227
"""Store in knit with parents"""
1228
k = self.make_test_knit()
1229
self.add_stock_one_and_one_a(k)
1230
self.assertEquals(set(k.get_ancestry(['text-1a'])), set(['text-1a', 'text-1']))
1232
def test_add_delta(self):
1233
"""Store in knit with parents"""
1234
k = KnitVersionedFile('test', get_transport('.'), factory=KnitPlainFactory(),
1235
delta=True, create=True)
1236
self.add_stock_one_and_one_a(k)
1238
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1240
def test_add_delta_knit_graph_index(self):
1241
"""Does adding work with a KnitGraphIndex."""
1242
index = InMemoryGraphIndex(2)
1243
knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
1245
k = KnitVersionedFile('test', get_transport('.'),
1246
delta=True, create=True, index=knit_index)
1247
self.add_stock_one_and_one_a(k)
1249
self.assertEqualDiff(''.join(k.get_lines('text-1a')), TEXT_1A)
1250
# check the index had the right data added.
1251
self.assertEqual(set([
1252
(index, ('text-1', ), ' 0 127', ((), ())),
1253
(index, ('text-1a', ), ' 127 140', ((('text-1', ),), (('text-1', ),))),
1254
]), set(index.iter_all_entries()))
1255
# we should not have a .kndx file
1256
self.assertFalse(get_transport('.').has('test.kndx'))
1258
def test_annotate(self):
1260
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1261
delta=True, create=True)
1262
self.insert_and_test_small_annotate(k)
1264
def insert_and_test_small_annotate(self, k):
1265
"""test annotation with k works correctly."""
1266
k.add_lines('text-1', [], ['a\n', 'b\n'])
1267
k.add_lines('text-2', ['text-1'], ['a\n', 'c\n'])
1269
origins = k.annotate('text-2')
1270
self.assertEquals(origins[0], ('text-1', 'a\n'))
1271
self.assertEquals(origins[1], ('text-2', 'c\n'))
1273
def test_annotate_fulltext(self):
1275
k = KnitVersionedFile('knit', get_transport('.'), factory=KnitAnnotateFactory(),
1276
delta=False, create=True)
1277
self.insert_and_test_small_annotate(k)
1279
def test_annotate_merge_1(self):
    """Merged lines are annotated to the parent that introduced them."""
    k = self.make_test_knit(True)
    k.add_lines('text-a1', [], ['a\n', 'b\n'])
    k.add_lines('text-a2', [], ['d\n', 'c\n'])
    k.add_lines('text-am', ['text-a1', 'text-a2'], ['d\n', 'b\n'])
    origins = k.annotate('text-am')
    self.assertEquals(origins[0], ('text-a2', 'd\n'))
    self.assertEquals(origins[1], ('text-a1', 'b\n'))
1288
def test_annotate_merge_2(self):
1289
k = self.make_test_knit(True)
1290
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1291
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1292
k.add_lines('text-am', ['text-a1', 'text-a2'], ['a\n', 'y\n', 'c\n'])
1293
origins = k.annotate('text-am')
1294
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1295
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1296
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1298
def test_annotate_merge_9(self):
1299
k = self.make_test_knit(True)
1300
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1301
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1302
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'c\n'])
1303
origins = k.annotate('text-am')
1304
self.assertEquals(origins[0], ('text-am', 'k\n'))
1305
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1306
self.assertEquals(origins[2], ('text-a1', 'c\n'))
1308
def test_annotate_merge_3(self):
1309
k = self.make_test_knit(True)
1310
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1311
k.add_lines('text-a2', [] ,['x\n', 'y\n', 'z\n'])
1312
k.add_lines('text-am', ['text-a1', 'text-a2'], ['k\n', 'y\n', 'z\n'])
1313
origins = k.annotate('text-am')
1314
self.assertEquals(origins[0], ('text-am', 'k\n'))
1315
self.assertEquals(origins[1], ('text-a2', 'y\n'))
1316
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1318
def test_annotate_merge_4(self):
1319
k = self.make_test_knit(True)
1320
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1321
k.add_lines('text-a2', [], ['x\n', 'y\n', 'z\n'])
1322
k.add_lines('text-a3', ['text-a1'], ['a\n', 'b\n', 'p\n'])
1323
k.add_lines('text-am', ['text-a2', 'text-a3'], ['a\n', 'b\n', 'z\n'])
1324
origins = k.annotate('text-am')
1325
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1326
self.assertEquals(origins[1], ('text-a1', 'b\n'))
1327
self.assertEquals(origins[2], ('text-a2', 'z\n'))
1329
def test_annotate_merge_5(self):
1330
k = self.make_test_knit(True)
1331
k.add_lines('text-a1', [], ['a\n', 'b\n', 'c\n'])
1332
k.add_lines('text-a2', [], ['d\n', 'e\n', 'f\n'])
1333
k.add_lines('text-a3', [], ['x\n', 'y\n', 'z\n'])
1334
k.add_lines('text-am',
1335
['text-a1', 'text-a2', 'text-a3'],
1336
['a\n', 'e\n', 'z\n'])
1337
origins = k.annotate('text-am')
1338
self.assertEquals(origins[0], ('text-a1', 'a\n'))
1339
self.assertEquals(origins[1], ('text-a2', 'e\n'))
1340
self.assertEquals(origins[2], ('text-a3', 'z\n'))
1342
def test_annotate_file_cherry_pick(self):
1343
k = self.make_test_knit(True)
1344
k.add_lines('text-1', [], ['a\n', 'b\n', 'c\n'])
1345
k.add_lines('text-2', ['text-1'], ['d\n', 'e\n', 'f\n'])
1346
k.add_lines('text-3', ['text-2', 'text-1'], ['a\n', 'b\n', 'c\n'])
1347
origins = k.annotate('text-3')
1348
self.assertEquals(origins[0], ('text-1', 'a\n'))
1349
self.assertEquals(origins[1], ('text-1', 'b\n'))
1350
self.assertEquals(origins[2], ('text-1', 'c\n'))
1352
def _test_join_with_factories(self, k1_factory, k2_factory):
1353
k1 = KnitVersionedFile('test1', get_transport('.'), factory=k1_factory, create=True)
1354
k1.add_lines('text-a', [], ['a1\n', 'a2\n', 'a3\n'])
1355
k1.add_lines('text-b', ['text-a'], ['a1\n', 'b2\n', 'a3\n'])
1356
k1.add_lines('text-c', [], ['c1\n', 'c2\n', 'c3\n'])
1357
k1.add_lines('text-d', ['text-c'], ['c1\n', 'd2\n', 'd3\n'])
1358
k1.add_lines('text-m', ['text-b', 'text-d'], ['a1\n', 'b2\n', 'd3\n'])
1359
k2 = KnitVersionedFile('test2', get_transport('.'), factory=k2_factory, create=True)
1360
count = k2.join(k1, version_ids=['text-m'])
1361
self.assertEquals(count, 5)
1362
self.assertTrue(k2.has_version('text-a'))
1363
self.assertTrue(k2.has_version('text-c'))
1364
origins = k2.annotate('text-m')
1365
self.assertEquals(origins[0], ('text-a', 'a1\n'))
1366
self.assertEquals(origins[1], ('text-b', 'b2\n'))
1367
self.assertEquals(origins[2], ('text-d', 'd3\n'))
1369
def test_knit_join_plain_to_plain(self):
1370
"""Test joining a plain knit with a plain knit."""
1371
self._test_join_with_factories(KnitPlainFactory(), KnitPlainFactory())
1373
def test_knit_join_anno_to_anno(self):
1374
"""Test joining an annotated knit with an annotated knit."""
1375
self._test_join_with_factories(None, None)
1377
def test_knit_join_anno_to_plain(self):
1378
"""Test joining an annotated knit with a plain knit."""
1379
self._test_join_with_factories(None, KnitPlainFactory())
1381
def test_knit_join_plain_to_anno(self):
1382
"""Test joining a plain knit with an annotated knit."""
1383
self._test_join_with_factories(KnitPlainFactory(), None)
1385
def test_reannotate(self):
1386
k1 = KnitVersionedFile('knit1', get_transport('.'),
1387
factory=KnitAnnotateFactory(), create=True)
1389
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1391
k1.add_lines('text-b', ['text-a'], ['a\n', 'c\n'])
1393
k2 = KnitVersionedFile('test2', get_transport('.'),
1394
factory=KnitAnnotateFactory(), create=True)
1395
k2.join(k1, version_ids=['text-b'])
1398
k1.add_lines('text-X', ['text-b'], ['a\n', 'b\n'])
1400
k2.add_lines('text-c', ['text-b'], ['z\n', 'c\n'])
1402
k2.add_lines('text-Y', ['text-b'], ['b\n', 'c\n'])
1404
# test-c will have index 3
1405
k1.join(k2, version_ids=['text-c'])
1407
lines = k1.get_lines('text-c')
1408
self.assertEquals(lines, ['z\n', 'c\n'])
1410
origins = k1.annotate('text-c')
1411
self.assertEquals(origins[0], ('text-c', 'z\n'))
1412
self.assertEquals(origins[1], ('text-b', 'c\n'))
1414
def test_get_line_delta_texts(self):
1415
"""Make sure we can call get_texts on text with reused line deltas"""
1416
k1 = KnitVersionedFile('test1', get_transport('.'),
1417
factory=KnitPlainFactory(), create=True)
1422
parents = ['%d' % (t-1)]
1423
k1.add_lines('%d' % t, parents, ['hello\n'] * t)
1424
k1.get_texts(('%d' % t) for t in range(3))
1426
def test_iter_lines_reads_in_order(self):
1427
instrumented_t = get_transport('trace+memory:///')
1428
k1 = KnitVersionedFile('id', instrumented_t, create=True, delta=True)
1429
self.assertEqual([('get', 'id.kndx',)], instrumented_t._activity)
1430
# add texts with no required ordering
1431
k1.add_lines('base', [], ['text\n'])
1432
k1.add_lines('base2', [], ['text2\n'])
1434
# clear the logged activity, but preserve the list instance in case of
1435
# clones pointing at it.
1436
del instrumented_t._activity[:]
1437
# request a last-first iteration
1438
results = list(k1.iter_lines_added_or_present_in_versions(
1441
[('readv', 'id.knit', [(0, 87), (87, 89)], False, None)],
1442
instrumented_t._activity)
1443
self.assertEqual([('text\n', 'base'), ('text2\n', 'base2')], results)
1445
def test_create_empty_annotated(self):
1446
k1 = self.make_test_knit(True)
1448
k1.add_lines('text-a', [], ['a\n', 'b\n'])
1449
k2 = k1.create_empty('t', MemoryTransport())
1450
self.assertTrue(isinstance(k2.factory, KnitAnnotateFactory))
1451
self.assertEqual(k1.delta, k2.delta)
1452
# the generic test checks for empty content and file class
1454
def test_knit_format(self):
1455
# this tests that a new knit index file has the expected content
1456
# and that is writes the data we expect as records are added.
1457
knit = self.make_test_knit(True)
1458
# Now knit files are not created until we first add data to them
1459
self.assertFileEqual("# bzr knit index 8\n", 'test.kndx')
1460
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1461
self.assertFileEqual(
1462
"# bzr knit index 8\n"
1464
"revid fulltext 0 84 .a_ghost :",
1466
knit.add_lines_with_ghosts('revid2', ['revid'], ['a\n'])
1467
self.assertFileEqual(
1468
"# bzr knit index 8\n"
1469
"\nrevid fulltext 0 84 .a_ghost :"
1470
"\nrevid2 line-delta 84 82 0 :",
1472
# we should be able to load this file again
1473
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1474
self.assertEqual(['revid', 'revid2'], knit.versions())
1475
# write a short write to the file and ensure that its ignored
1476
indexfile = file('test.kndx', 'ab')
1477
indexfile.write('\nrevid3 line-delta 166 82 1 2 3 4 5 .phwoar:demo ')
1479
# we should be able to load this file again
1480
knit = KnitVersionedFile('test', get_transport('.'), access_mode='w')
1481
self.assertEqual(['revid', 'revid2'], knit.versions())
1482
# and add a revision with the same id the failed write had
1483
knit.add_lines('revid3', ['revid2'], ['a\n'])
1484
# and when reading it revid3 should now appear.
1485
knit = KnitVersionedFile('test', get_transport('.'), access_mode='r')
1486
self.assertEqual(['revid', 'revid2', 'revid3'], knit.versions())
1487
self.assertEqual(['revid2'], knit.get_parents('revid3'))
1489
def test_delay_create(self):
1490
"""Test that passing delay_create=True creates files late"""
1491
knit = self.make_test_knit(annotate=True, delay_create=True)
1492
self.failIfExists('test.knit')
1493
self.failIfExists('test.kndx')
1494
knit.add_lines_with_ghosts('revid', ['a_ghost'], ['a\n'])
1495
self.failUnlessExists('test.knit')
1496
self.assertFileEqual(
1497
"# bzr knit index 8\n"
1499
"revid fulltext 0 84 .a_ghost :",
1502
def test_create_parent_dir(self):
1503
"""create_parent_dir can create knits in nonexistant dirs"""
1504
# Has no effect if we don't set 'delay_create'
1505
trans = get_transport('.')
1506
self.assertRaises(NoSuchFile, KnitVersionedFile, 'dir/test',
1507
trans, access_mode='w', factory=None,
1508
create=True, create_parent_dir=True)
1509
# Nothing should have changed yet
1510
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1511
factory=None, create=True,
1512
create_parent_dir=True,
1514
self.failIfExists('dir/test.knit')
1515
self.failIfExists('dir/test.kndx')
1516
self.failIfExists('dir')
1517
knit.add_lines('revid', [], ['a\n'])
1518
self.failUnlessExists('dir')
1519
self.failUnlessExists('dir/test.knit')
1520
self.assertFileEqual(
1521
"# bzr knit index 8\n"
1523
"revid fulltext 0 84 :",
1526
def test_create_mode_700(self):
1527
trans = get_transport('.')
1528
if not trans._can_roundtrip_unix_modebits():
1529
# Can't roundtrip, so no need to run this test
1531
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1532
factory=None, create=True,
1533
create_parent_dir=True,
1537
knit.add_lines('revid', [], ['a\n'])
1538
self.assertTransportMode(trans, 'dir', 0700)
1539
self.assertTransportMode(trans, 'dir/test.knit', 0600)
1540
self.assertTransportMode(trans, 'dir/test.kndx', 0600)
1542
def test_create_mode_770(self):
1543
trans = get_transport('.')
1544
if not trans._can_roundtrip_unix_modebits():
1545
# Can't roundtrip, so no need to run this test
1547
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1548
factory=None, create=True,
1549
create_parent_dir=True,
1553
knit.add_lines('revid', [], ['a\n'])
1554
self.assertTransportMode(trans, 'dir', 0770)
1555
self.assertTransportMode(trans, 'dir/test.knit', 0660)
1556
self.assertTransportMode(trans, 'dir/test.kndx', 0660)
1558
def test_create_mode_777(self):
1559
trans = get_transport('.')
1560
if not trans._can_roundtrip_unix_modebits():
1561
# Can't roundtrip, so no need to run this test
1563
knit = KnitVersionedFile('dir/test', trans, access_mode='w',
1564
factory=None, create=True,
1565
create_parent_dir=True,
1569
knit.add_lines('revid', [], ['a\n'])
1570
self.assertTransportMode(trans, 'dir', 0777)
1571
self.assertTransportMode(trans, 'dir/test.knit', 0666)
1572
self.assertTransportMode(trans, 'dir/test.kndx', 0666)
1574
def test_plan_merge(self):
1575
my_knit = self.make_test_knit(annotate=True)
1576
my_knit.add_lines('text1', [], split_lines(TEXT_1))
1577
my_knit.add_lines('text1a', ['text1'], split_lines(TEXT_1A))
1578
my_knit.add_lines('text1b', ['text1'], split_lines(TEXT_1B))
1579
plan = list(my_knit.plan_merge('text1a', 'text1b'))
1580
for plan_line, expected_line in zip(plan, AB_MERGE):
1581
self.assertEqual(plan_line, expected_line)
1583
def test_get_stream_empty(self):
1584
"""Get a data stream for an empty knit file."""
1585
k1 = self.make_test_knit()
1586
format, data_list, reader_callable = k1.get_data_stream([])
1587
self.assertEqual('knit-plain', format)
1588
self.assertEqual([], data_list)
1589
content = reader_callable(None)
1590
self.assertEqual('', content)
1591
self.assertIsInstance(content, str)
1593
def test_get_stream_one_version(self):
1594
"""Get a data stream for a single record out of a knit containing just
1597
k1 = self.make_test_knit()
1599
('text-a', [], TEXT_1),
1601
expected_data_list = [
1602
# version, options, length, parents
1603
('text-a', ['fulltext'], 122, []),
1605
for version_id, parents, lines in test_data:
1606
k1.add_lines(version_id, parents, split_lines(lines))
1608
format, data_list, reader_callable = k1.get_data_stream(['text-a'])
1609
self.assertEqual('knit-plain', format)
1610
self.assertEqual(expected_data_list, data_list)
1611
# There's only one record in the knit, so the content should be the
1612
# entire knit data file's contents.
1613
self.assertEqual(k1.transport.get_bytes(k1._data._access._filename),
1614
reader_callable(None))
1616
def test_get_stream_get_one_version_of_many(self):
1617
"""Get a data stream for just one version out of a knit containing many
1620
k1 = self.make_test_knit()
1621
# Insert the same data as test_knit_join, as they seem to cover a range
1622
# of cases (no parents, one parent, multiple parents).
1624
('text-a', [], TEXT_1),
1625
('text-b', ['text-a'], TEXT_1),
1626
('text-c', [], TEXT_1),
1627
('text-d', ['text-c'], TEXT_1),
1628
('text-m', ['text-b', 'text-d'], TEXT_1),
1630
expected_data_list = [
1631
# version, options, length, parents
1632
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1634
for version_id, parents, lines in test_data:
1635
k1.add_lines(version_id, parents, split_lines(lines))
1637
format, data_list, reader_callable = k1.get_data_stream(['text-m'])
1638
self.assertEqual('knit-plain', format)
1639
self.assertEqual(expected_data_list, data_list)
1640
self.assertRecordContentEqual(k1, 'text-m', reader_callable(None))
1642
def test_get_data_stream_unordered_index(self):
1643
"""Get a data stream when the knit index reports versions out of order.
1645
https://bugs.launchpad.net/bzr/+bug/164637
1647
k1 = self.make_test_knit()
1649
('text-a', [], TEXT_1),
1650
('text-b', ['text-a'], TEXT_1),
1651
('text-c', [], TEXT_1),
1652
('text-d', ['text-c'], TEXT_1),
1653
('text-m', ['text-b', 'text-d'], TEXT_1),
1655
for version_id, parents, lines in test_data:
1656
k1.add_lines(version_id, parents, split_lines(lines))
1657
# monkey-patch versions method to return out of order, as if coming
1658
# from multiple independently indexed packs
1659
original_versions = k1.versions
1660
k1.versions = lambda: reversed(original_versions())
1661
expected_data_list = [
1662
('text-a', ['fulltext'], 122, []),
1663
('text-b', ['line-delta'], 84, ['text-a'])]
1664
# now check the fulltext is first and the delta second
1665
format, data_list, _ = k1.get_data_stream(['text-a', 'text-b'])
1666
self.assertEqual('knit-plain', format)
1667
self.assertEqual(expected_data_list, data_list)
1668
# and that's true if we ask for them in the opposite order too
1669
format, data_list, _ = k1.get_data_stream(['text-b', 'text-a'])
1670
self.assertEqual(expected_data_list, data_list)
1671
# also try requesting more versions
1672
format, data_list, _ = k1.get_data_stream([
1673
'text-m', 'text-b', 'text-a'])
1675
('text-a', ['fulltext'], 122, []),
1676
('text-b', ['line-delta'], 84, ['text-a']),
1677
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1680
def test_get_stream_ghost_parent(self):
1681
"""Get a data stream for a version with a ghost parent."""
1682
k1 = self.make_test_knit()
1684
k1.add_lines('text-a', [], split_lines(TEXT_1))
1685
k1.add_lines_with_ghosts('text-b', ['text-a', 'text-ghost'],
1686
split_lines(TEXT_1))
1688
expected_data_list = [
1689
# version, options, length, parents
1690
('text-b', ['line-delta'], 84, ['text-a', 'text-ghost']),
1693
format, data_list, reader_callable = k1.get_data_stream(['text-b'])
1694
self.assertEqual('knit-plain', format)
1695
self.assertEqual(expected_data_list, data_list)
1696
self.assertRecordContentEqual(k1, 'text-b', reader_callable(None))
1698
def test_get_stream_get_multiple_records(self):
1699
"""Get a stream for multiple records of a knit."""
1700
k1 = self.make_test_knit()
1701
# Insert the same data as test_knit_join, as they seem to cover a range
1702
# of cases (no parents, one parent, multiple parents).
1704
('text-a', [], TEXT_1),
1705
('text-b', ['text-a'], TEXT_1),
1706
('text-c', [], TEXT_1),
1707
('text-d', ['text-c'], TEXT_1),
1708
('text-m', ['text-b', 'text-d'], TEXT_1),
1710
for version_id, parents, lines in test_data:
1711
k1.add_lines(version_id, parents, split_lines(lines))
1713
# This test is actually a bit strict as the order in which they're
1714
# returned is not defined. This matches the current (deterministic)
1716
expected_data_list = [
1717
# version, options, length, parents
1718
('text-d', ['line-delta'], 84, ['text-c']),
1719
('text-b', ['line-delta'], 84, ['text-a']),
1721
# Note that even though we request the revision IDs in a particular
1722
# order, the data stream may return them in any order it likes. In this
1723
# case, they'll be in the order they were inserted into the knit.
1724
format, data_list, reader_callable = k1.get_data_stream(
1725
['text-d', 'text-b'])
1726
self.assertEqual('knit-plain', format)
1727
self.assertEqual(expected_data_list, data_list)
1728
# must match order they're returned
1729
self.assertRecordContentEqual(k1, 'text-d', reader_callable(84))
1730
self.assertRecordContentEqual(k1, 'text-b', reader_callable(84))
1731
self.assertEqual('', reader_callable(None),
1732
"There should be no more bytes left to read.")
1734
def test_get_stream_all(self):
1735
"""Get a data stream for all the records in a knit.
1737
This exercises fulltext records, line-delta records, records with
1738
various numbers of parents, and reading multiple records out of the
1739
callable. These cases ought to all be exercised individually by the
1740
other test_get_stream_* tests; this test is basically just paranoia.
1742
k1 = self.make_test_knit()
1743
# Insert the same data as test_knit_join, as they seem to cover a range
1744
# of cases (no parents, one parent, multiple parents).
1746
('text-a', [], TEXT_1),
1747
('text-b', ['text-a'], TEXT_1),
1748
('text-c', [], TEXT_1),
1749
('text-d', ['text-c'], TEXT_1),
1750
('text-m', ['text-b', 'text-d'], TEXT_1),
1752
for version_id, parents, lines in test_data:
1753
k1.add_lines(version_id, parents, split_lines(lines))
1755
# This test is actually a bit strict as the order in which they're
1756
# returned is not defined. This matches the current (deterministic)
1758
expected_data_list = [
1759
# version, options, length, parents
1760
('text-a', ['fulltext'], 122, []),
1761
('text-b', ['line-delta'], 84, ['text-a']),
1762
('text-m', ['line-delta'], 84, ['text-b', 'text-d']),
1763
('text-c', ['fulltext'], 121, []),
1764
('text-d', ['line-delta'], 84, ['text-c']),
1766
format, data_list, reader_callable = k1.get_data_stream(
1767
['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
1768
self.assertEqual('knit-plain', format)
1769
self.assertEqual(expected_data_list, data_list)
1770
for version_id, options, length, parents in expected_data_list:
1771
bytes = reader_callable(length)
1772
self.assertRecordContentEqual(k1, version_id, bytes)
1774
def assertKnitFilesEqual(self, knit1, knit2):
1775
"""Assert that the contents of the index and data files of two knits are
1779
knit1.transport.get_bytes(knit1._data._access._filename),
1780
knit2.transport.get_bytes(knit2._data._access._filename))
1782
knit1.transport.get_bytes(knit1._index._filename),
1783
knit2.transport.get_bytes(knit2._index._filename))
1785
def assertKnitValuesEqual(self, left, right):
1786
"""Assert that the texts, annotations and graph of left and right are
1789
self.assertEqual(set(left.versions()), set(right.versions()))
1790
for version in left.versions():
1791
self.assertEqual(left.get_parents_with_ghosts(version),
1792
right.get_parents_with_ghosts(version))
1793
self.assertEqual(left.get_lines(version),
1794
right.get_lines(version))
1795
self.assertEqual(left.annotate(version),
1796
right.annotate(version))
1798
def test_insert_data_stream_empty(self):
1799
"""Inserting a data stream with no records should not put any data into
1802
k1 = self.make_test_knit()
1803
k1.insert_data_stream(
1804
(k1.get_format_signature(), [], lambda ignored: ''))
1805
self.assertEqual('', k1.transport.get_bytes(k1._data._access._filename),
1806
"The .knit should be completely empty.")
1807
self.assertEqual(k1._index.HEADER,
1808
k1.transport.get_bytes(k1._index._filename),
1809
"The .kndx should have nothing apart from the header.")
1811
def test_insert_data_stream_one_record(self):
1812
"""Inserting a data stream with one record from a knit with one record
1813
results in byte-identical files.
1815
source = self.make_test_knit(name='source')
1816
source.add_lines('text-a', [], split_lines(TEXT_1))
1817
data_stream = source.get_data_stream(['text-a'])
1818
target = self.make_test_knit(name='target')
1819
target.insert_data_stream(data_stream)
1820
self.assertKnitFilesEqual(source, target)
1822
def test_insert_data_stream_annotated_unannotated(self):
1823
"""Inserting an annotated datastream to an unannotated knit works."""
1824
# case one - full texts.
1825
source = self.make_test_knit(name='source', annotate=True)
1826
target = self.make_test_knit(name='target', annotate=False)
1827
source.add_lines('text-a', [], split_lines(TEXT_1))
1828
target.insert_data_stream(source.get_data_stream(['text-a']))
1829
self.assertKnitValuesEqual(source, target)
1830
# case two - deltas.
1831
source.add_lines('text-b', ['text-a'], split_lines(TEXT_2))
1832
target.insert_data_stream(source.get_data_stream(['text-b']))
1833
self.assertKnitValuesEqual(source, target)
1835
def test_insert_data_stream_unannotated_annotated(self):
1836
"""Inserting an unannotated datastream to an annotated knit works."""
1837
# case one - full texts.
1838
source = self.make_test_knit(name='source', annotate=False)
1839
target = self.make_test_knit(name='target', annotate=True)
1840
source.add_lines('text-a', [], split_lines(TEXT_1))
1841
target.insert_data_stream(source.get_data_stream(['text-a']))
1842
self.assertKnitValuesEqual(source, target)
1843
# case two - deltas.
1844
source.add_lines('text-b', ['text-a'], split_lines(TEXT_2))
1845
target.insert_data_stream(source.get_data_stream(['text-b']))
1846
self.assertKnitValuesEqual(source, target)
1848
def test_insert_data_stream_records_already_present(self):
1849
"""Insert a data stream where some records are alreday present in the
1850
target, and some not. Only the new records are inserted.
1852
source = self.make_test_knit(name='source')
1853
target = self.make_test_knit(name='target')
1854
# Insert 'text-a' into both source and target
1855
source.add_lines('text-a', [], split_lines(TEXT_1))
1856
target.insert_data_stream(source.get_data_stream(['text-a']))
1857
# Insert 'text-b' into just the source.
1858
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1859
# Get a data stream of both text-a and text-b, and insert it.
1860
data_stream = source.get_data_stream(['text-a', 'text-b'])
1861
target.insert_data_stream(data_stream)
1862
# The source and target will now be identical. This means the text-a
1863
# record was not added a second time.
1864
self.assertKnitFilesEqual(source, target)
1866
def test_insert_data_stream_multiple_records(self):
1867
"""Inserting a data stream of all records from a knit with multiple
1868
records results in byte-identical files.
1870
source = self.make_test_knit(name='source')
1871
source.add_lines('text-a', [], split_lines(TEXT_1))
1872
source.add_lines('text-b', ['text-a'], split_lines(TEXT_1))
1873
source.add_lines('text-c', [], split_lines(TEXT_1))
1874
data_stream = source.get_data_stream(['text-a', 'text-b', 'text-c'])
1876
target = self.make_test_knit(name='target')
1877
target.insert_data_stream(data_stream)
1879
self.assertKnitFilesEqual(source, target)
1881
def test_insert_data_stream_ghost_parent(self):
1882
"""Insert a data stream with a record that has a ghost parent."""
1883
# Make a knit with a record, text-a, that has a ghost parent.
1884
source = self.make_test_knit(name='source')
1885
source.add_lines_with_ghosts('text-a', ['text-ghost'],
1886
split_lines(TEXT_1))
1887
data_stream = source.get_data_stream(['text-a'])
1889
target = self.make_test_knit(name='target')
1890
target.insert_data_stream(data_stream)
1892
self.assertKnitFilesEqual(source, target)
1894
# The target knit object is in a consistent state, i.e. the record we
1895
# just added is immediately visible.
1896
self.assertTrue(target.has_version('text-a'))
1897
self.assertTrue(target.has_ghost('text-ghost'))
1898
self.assertEqual(split_lines(TEXT_1), target.get_lines('text-a'))
1900
def test_insert_data_stream_inconsistent_version_lines(self):
1901
"""Inserting a data stream which has different content for a version_id
1902
than already exists in the knit will raise KnitCorrupt.
1904
source = self.make_test_knit(name='source')
1905
target = self.make_test_knit(name='target')
1906
# Insert a different 'text-a' into both source and target
1907
source.add_lines('text-a', [], split_lines(TEXT_1))
1908
target.add_lines('text-a', [], split_lines(TEXT_2))
1909
# Insert a data stream with conflicting content into the target
1910
data_stream = source.get_data_stream(['text-a'])
1912
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1914
def test_insert_data_stream_inconsistent_version_parents(self):
1915
"""Inserting a data stream which has different parents for a version_id
1916
than already exists in the knit will raise KnitCorrupt.
1918
source = self.make_test_knit(name='source')
1919
target = self.make_test_knit(name='target')
1920
# Insert a different 'text-a' into both source and target. They differ
1921
# only by the parents list, the content is the same.
1922
source.add_lines_with_ghosts('text-a', [], split_lines(TEXT_1))
1923
target.add_lines_with_ghosts('text-a', ['a-ghost'], split_lines(TEXT_1))
1924
# Insert a data stream with conflicting content into the target
1925
data_stream = source.get_data_stream(['text-a'])
1927
errors.KnitCorrupt, target.insert_data_stream, data_stream)
1929
def test_insert_data_stream_unknown_format(self):
1930
"""A data stream in a different format to the target knit cannot be
1933
It will raise KnitDataStreamUnknown because the fallback code will fail
1934
to make a knit. In future we may need KnitDataStreamIncompatible again,
1935
for more exotic cases.
1937
data_stream = ('fake-format-signature', [], lambda _: '')
1938
target = self.make_test_knit(name='target')
1940
errors.KnitDataStreamUnknown,
1941
target.insert_data_stream, data_stream)
1943
# * test that a stream of "already present version, then new version"
1944
# inserts correctly.
1947
def assertMadeStreamKnit(self, source_knit, versions, target_knit):
1948
"""Assert that a knit made from a stream is as expected."""
1949
a_stream = source_knit.get_data_stream(versions)
1950
expected_data = a_stream[2](None)
1951
a_stream = source_knit.get_data_stream(versions)
1952
a_knit = target_knit._knit_from_datastream(a_stream)
1953
self.assertEqual(source_knit.factory.__class__,
1954
a_knit.factory.__class__)
1955
self.assertIsInstance(a_knit._data._access, _StreamAccess)
1956
self.assertIsInstance(a_knit._index, _StreamIndex)
1957
self.assertEqual(a_knit._index.data_list, a_stream[1])
1958
self.assertEqual(a_knit._data._access.data, expected_data)
1959
self.assertEqual(a_knit.filename, target_knit.filename)
1960
self.assertEqual(a_knit.transport, target_knit.transport)
1961
self.assertEqual(a_knit._index, a_knit._data._access.stream_index)
1962
self.assertEqual(target_knit, a_knit._data._access.backing_knit)
1963
self.assertIsInstance(a_knit._data._access.orig_factory,
1964
source_knit.factory.__class__)
1966
def test__knit_from_data_stream_empty(self):
1967
"""Create a knit object from a datastream."""
1968
annotated = self.make_test_knit(name='source', annotate=True)
1969
plain = self.make_test_knit(name='target', annotate=False)
1970
# case 1: annotated source
1971
self.assertMadeStreamKnit(annotated, [], annotated)
1972
self.assertMadeStreamKnit(annotated, [], plain)
1973
# case 2: plain source
1974
self.assertMadeStreamKnit(plain, [], annotated)
1975
self.assertMadeStreamKnit(plain, [], plain)
1977
def test__knit_from_data_stream_unknown_format(self):
1978
annotated = self.make_test_knit(name='source', annotate=True)
1979
self.assertRaises(errors.KnitDataStreamUnknown,
1980
annotated._knit_from_datastream, ("unknown", None, None))
1992
Banana cup cake recipe
1998
- self-raising flour
2002
Banana cup cake recipe
2004
- bananas (do not use plantains!!!)
2011
Banana cup cake recipe
2014
- self-raising flour
2027
AB_MERGE_TEXT="""unchanged|Banana cup cake recipe
2032
new-b|- bananas (do not use plantains!!!)
2033
unchanged|- broken tea cups
2034
new-a|- self-raising flour
2037
AB_MERGE=[tuple(l.split('|')) for l in AB_MERGE_TEXT.splitlines(True)]
2040
def line_delta(from_lines, to_lines):
2041
"""Generate line-based delta from one text to another"""
2042
s = difflib.SequenceMatcher(None, from_lines, to_lines)
2043
for op in s.get_opcodes():
2044
if op[0] == 'equal':
2046
yield '%d,%d,%d\n' % (op[1], op[2], op[4]-op[3])
2047
for i in range(op[3], op[4]):
2051
def apply_line_delta(basis_lines, delta_lines):
2052
"""Apply a line-based perfect diff
2054
basis_lines -- text to apply the patch to
2055
delta_lines -- diff instructions and content
2057
out = basis_lines[:]
2060
while i < len(delta_lines):
2062
a, b, c = map(long, l.split(','))
2064
out[offset+a:offset+b] = delta_lines[i:i+c]
2066
offset = offset + (b - a) + c
2070
class TestWeaveToKnit(KnitTests):
2072
def test_weave_to_knit_matches(self):
2073
# check that the WeaveToKnit is_compatible function
2074
# registers True for a Weave to a Knit.
2076
k = self.make_test_knit()
2077
self.failUnless(WeaveToKnit.is_compatible(w, k))
2078
self.failIf(WeaveToKnit.is_compatible(k, w))
2079
self.failIf(WeaveToKnit.is_compatible(w, w))
2080
self.failIf(WeaveToKnit.is_compatible(k, k))
2083
class TestKnitCaching(KnitTests):
2085
def create_knit(self):
2086
k = self.make_test_knit(True)
2087
k.add_lines('text-1', [], split_lines(TEXT_1))
2088
k.add_lines('text-2', [], split_lines(TEXT_2))
2091
def test_no_caching(self):
2092
k = self.create_knit()
2093
# Nothing should be cached without setting 'enable_cache'
2094
self.assertEqual({}, k._data._cache)
2096
def test_cache_data_read_raw(self):
2097
k = self.create_knit()
2099
# Now cache and read
2102
def read_one_raw(version):
2103
pos_map = k._get_components_positions([version])
2104
method, index_memo, next = pos_map[version]
2105
lst = list(k._data.read_records_iter_raw([(version, index_memo)]))
2106
self.assertEqual(1, len(lst))
2109
val = read_one_raw('text-1')
2110
self.assertEqual({'text-1':val[1]}, k._data._cache)
2113
# After clear, new reads are not cached
2114
self.assertEqual({}, k._data._cache)
2116
val2 = read_one_raw('text-1')
2117
self.assertEqual(val, val2)
2118
self.assertEqual({}, k._data._cache)
2120
def test_cache_data_read(self):
2121
k = self.create_knit()
2123
def read_one(version):
2124
pos_map = k._get_components_positions([version])
2125
method, index_memo, next = pos_map[version]
2126
lst = list(k._data.read_records_iter([(version, index_memo)]))
2127
self.assertEqual(1, len(lst))
2130
# Now cache and read
2133
val = read_one('text-2')
2134
self.assertEqual(['text-2'], k._data._cache.keys())
2135
self.assertEqual('text-2', val[0])
2136
content, digest = k._data._parse_record('text-2',
2137
k._data._cache['text-2'])
2138
self.assertEqual(content, val[1])
2139
self.assertEqual(digest, val[2])
2142
self.assertEqual({}, k._data._cache)
2144
val2 = read_one('text-2')
2145
self.assertEqual(val, val2)
2146
self.assertEqual({}, k._data._cache)
2148
def test_cache_read(self):
2149
k = self.create_knit()
2152
text = k.get_text('text-1')
2153
self.assertEqual(TEXT_1, text)
2154
self.assertEqual(['text-1'], k._data._cache.keys())
2157
self.assertEqual({}, k._data._cache)
2159
text = k.get_text('text-1')
2160
self.assertEqual(TEXT_1, text)
2161
self.assertEqual({}, k._data._cache)
1505
2164
class TestKnitIndex(KnitTests):
1633
2283
add_callback = self.catch_add
1635
2285
add_callback = None
1636
return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
2286
return KnitGraphIndex(combined_index, deltas=deltas,
1637
2287
add_callback=add_callback)
1639
def test_keys(self):
1640
index = self.two_graph_index()
1641
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2289
def test_get_graph(self):
2290
index = self.two_graph_index()
2291
self.assertEqual(set([
2292
('tip', ('parent', )),
2294
('parent', ('tail', 'ghost')),
2296
]), set(index.get_graph()))
2298
def test_get_ancestry(self):
2299
# get_ancestry is defined as eliding ghosts, not erroring.
2300
index = self.two_graph_index()
2301
self.assertEqual([], index.get_ancestry([]))
2302
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2303
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2304
self.assertEqual(['tail', 'parent'], index.get_ancestry(['parent']))
2305
self.assertEqual(['tail', 'parent', 'tip'], index.get_ancestry(['tip']))
2306
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2307
(['tail', 'parent', 'tip', 'separate'],
2308
['separate', 'tail', 'parent', 'tip'],
2310
# and without topo_sort
2311
self.assertEqual(set(['separate']),
2312
set(index.get_ancestry(['separate'], topo_sorted=False)))
2313
self.assertEqual(set(['tail']),
2314
set(index.get_ancestry(['tail'], topo_sorted=False)))
2315
self.assertEqual(set(['tail', 'parent']),
2316
set(index.get_ancestry(['parent'], topo_sorted=False)))
2317
self.assertEqual(set(['tail', 'parent', 'tip']),
2318
set(index.get_ancestry(['tip'], topo_sorted=False)))
2319
self.assertEqual(set(['separate', 'tail', 'parent', 'tip']),
2320
set(index.get_ancestry(['tip', 'separate'])))
2321
# asking for a ghost makes it go boom.
2322
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2324
def test_get_ancestry_with_ghosts(self):
2325
index = self.two_graph_index()
2326
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2327
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2328
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2329
self.assertTrue(index.get_ancestry_with_ghosts(['parent']) in
2330
(['tail', 'ghost', 'parent'],
2331
['ghost', 'tail', 'parent'],
2333
self.assertTrue(index.get_ancestry_with_ghosts(['tip']) in
2334
(['tail', 'ghost', 'parent', 'tip'],
2335
['ghost', 'tail', 'parent', 'tip'],
2337
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2338
(['tail', 'ghost', 'parent', 'tip', 'separate'],
2339
['ghost', 'tail', 'parent', 'tip', 'separate'],
2340
['separate', 'tail', 'ghost', 'parent', 'tip'],
2341
['separate', 'ghost', 'tail', 'parent', 'tip'],
2343
# asking for a ghost makes it go boom.
2344
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2346
def test_num_versions(self):
2347
index = self.two_graph_index()
2348
self.assertEqual(4, index.num_versions())
2350
def test_get_versions(self):
2351
index = self.two_graph_index()
2352
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2353
set(index.get_versions()))
2355
def test_has_version(self):
2356
index = self.two_graph_index()
2357
self.assertTrue(index.has_version('tail'))
2358
self.assertFalse(index.has_version('ghost'))
1644
2360
def test_get_position(self):
1645
2361
index = self.two_graph_index()
1646
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position(('tip',)))
1647
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position(('parent',)))
2362
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2363
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1649
2365
def test_get_method_deltas(self):
1650
2366
index = self.two_graph_index(deltas=True)
1651
self.assertEqual('fulltext', index.get_method(('tip',)))
1652
self.assertEqual('line-delta', index.get_method(('parent',)))
2367
self.assertEqual('fulltext', index.get_method('tip'))
2368
self.assertEqual('line-delta', index.get_method('parent'))
1654
2370
def test_get_method_no_deltas(self):
1655
2371
# check that the parent-history lookup is ignored with deltas=False.
1656
2372
index = self.two_graph_index(deltas=False)
1657
self.assertEqual('fulltext', index.get_method(('tip',)))
1658
self.assertEqual('fulltext', index.get_method(('parent',)))
2373
self.assertEqual('fulltext', index.get_method('tip'))
2374
self.assertEqual('fulltext', index.get_method('parent'))
1660
2376
def test_get_options_deltas(self):
1661
2377
index = self.two_graph_index(deltas=True)
1662
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1663
self.assertEqual(['line-delta'], index.get_options(('parent',)))
2378
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2379
self.assertEqual(['line-delta'], index.get_options('parent'))
1665
2381
def test_get_options_no_deltas(self):
1666
2382
# check that the parent-history lookup is ignored with deltas=False.
1667
2383
index = self.two_graph_index(deltas=False)
1668
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1669
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1671
def test_get_parent_map(self):
1672
index = self.two_graph_index()
1673
self.assertEqual({('parent',):(('tail',), ('ghost',))},
1674
index.get_parent_map([('parent',), ('ghost',)]))
2384
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2385
self.assertEqual(['fulltext'], index.get_options('parent'))
2387
def test_get_parents(self):
2388
# get_parents ignores ghosts
2389
index = self.two_graph_index()
2390
self.assertEqual(('tail', ), index.get_parents('parent'))
2391
# and errors on ghosts.
2392
self.assertRaises(errors.RevisionNotPresent,
2393
index.get_parents, 'ghost')
2395
def test_get_parents_with_ghosts(self):
2396
index = self.two_graph_index()
2397
self.assertEqual(('tail', 'ghost'), index.get_parents_with_ghosts('parent'))
2398
# and errors on ghosts.
2399
self.assertRaises(errors.RevisionNotPresent,
2400
index.get_parents_with_ghosts, 'ghost')
2402
def test_check_versions_present(self):
2403
# ghosts should not be considered present
2404
index = self.two_graph_index()
2405
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2407
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2409
index.check_versions_present(['tail', 'separate'])
1676
2411
def catch_add(self, entries):
1677
2412
self.caught_entries.append(entries)
1679
2414
def test_add_no_callback_errors(self):
1680
2415
index = self.two_graph_index()
1681
self.assertRaises(errors.ReadOnlyError, index.add_records,
1682
[(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])
2416
self.assertRaises(errors.ReadOnlyError, index.add_version,
2417
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1684
2419
def test_add_version_smoke(self):
1685
2420
index = self.two_graph_index(catch_adds=True)
1686
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
2421
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1688
2422
self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
1689
2423
self.caught_entries)
1691
2425
def test_add_version_delta_not_delta_index(self):
1692
2426
index = self.two_graph_index(catch_adds=True)
1693
self.assertRaises(errors.KnitCorrupt, index.add_records,
1694
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2427
self.assertRaises(errors.KnitCorrupt, index.add_version,
2428
'new', 'no-eol,line-delta', (None, 0, 100), ['parent'])
1695
2429
self.assertEqual([], self.caught_entries)
1697
2431
def test_add_version_same_dup(self):
1698
2432
index = self.two_graph_index(catch_adds=True)
1699
2433
# options can be spelt two different ways
1700
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
1701
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
1702
# position/length are ignored (because each pack could have fulltext or
1703
# delta, and be at a different position.
1704
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1706
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
1708
# but neither should have added data:
1709
self.assertEqual([[], [], [], []], self.caught_entries)
2434
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2435
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])
2436
# but neither should have added data.
2437
self.assertEqual([[], []], self.caught_entries)
1711
2439
def test_add_version_different_dup(self):
1712
2440
index = self.two_graph_index(deltas=True, catch_adds=True)
1713
2441
# change options
1714
self.assertRaises(errors.KnitCorrupt, index.add_records,
1715
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1716
self.assertRaises(errors.KnitCorrupt, index.add_records,
1717
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2442
self.assertRaises(errors.KnitCorrupt, index.add_version,
2443
'tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])
2444
self.assertRaises(errors.KnitCorrupt, index.add_version,
2445
'tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])
2446
self.assertRaises(errors.KnitCorrupt, index.add_version,
2447
'tip', 'fulltext', (None, 0, 100), ['parent'])
2449
self.assertRaises(errors.KnitCorrupt, index.add_version,
2450
'tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])
2451
self.assertRaises(errors.KnitCorrupt, index.add_version,
2452
'tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])
1719
self.assertRaises(errors.KnitCorrupt, index.add_records,
1720
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2454
self.assertRaises(errors.KnitCorrupt, index.add_version,
2455
'tip', 'fulltext,no-eol', (None, 0, 100), [])
1721
2456
self.assertEqual([], self.caught_entries)
1723
2458
def test_add_versions_nodeltas(self):
1724
2459
index = self.two_graph_index(catch_adds=True)
1726
(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
1727
(('new2',), 'fulltext', (None, 0, 6), [('new',)]),
2460
index.add_versions([
2461
('new', 'fulltext,no-eol', (None, 50, 60), ['separate']),
2462
('new2', 'fulltext', (None, 0, 6), ['new']),
1729
2464
self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
1730
2465
(('new2', ), ' 0 6', ((('new',),),))],
1745
2480
def test_add_versions_delta_not_delta_index(self):
1746
2481
index = self.two_graph_index(catch_adds=True)
1747
self.assertRaises(errors.KnitCorrupt, index.add_records,
1748
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2482
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2483
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1749
2484
self.assertEqual([], self.caught_entries)
1751
2486
def test_add_versions_random_id_accepted(self):
1752
2487
index = self.two_graph_index(catch_adds=True)
1753
index.add_records([], random_id=True)
2488
index.add_versions([], random_id=True)
1755
2490
def test_add_versions_same_dup(self):
1756
2491
index = self.two_graph_index(catch_adds=True)
1757
2492
# options can be spelt two different ways
1758
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
1760
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
1762
# position/length are ignored (because each pack could have fulltext or
1763
# delta, and be at a different position.
1764
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
1766
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
2493
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2494
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
1768
2495
# but neither should have added data.
1769
self.assertEqual([[], [], [], []], self.caught_entries)
2496
self.assertEqual([[], []], self.caught_entries)
1771
2498
def test_add_versions_different_dup(self):
1772
2499
index = self.two_graph_index(deltas=True, catch_adds=True)
1773
2500
# change options
1774
self.assertRaises(errors.KnitCorrupt, index.add_records,
1775
[(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
1776
self.assertRaises(errors.KnitCorrupt, index.add_records,
1777
[(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
2501
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2502
[('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2503
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2504
[('tip', 'line-delta,no-eol', (None, 0, 100), ['parent'])])
2505
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2506
[('tip', 'fulltext', (None, 0, 100), ['parent'])])
2508
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2509
[('tip', 'fulltext,no-eol', (None, 50, 100), ['parent'])])
2510
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2511
[('tip', 'fulltext,no-eol', (None, 0, 1000), ['parent'])])
1779
self.assertRaises(errors.KnitCorrupt, index.add_records,
1780
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2513
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2514
[('tip', 'fulltext,no-eol', (None, 0, 100), [])])
1781
2515
# change options in the second record
1782
self.assertRaises(errors.KnitCorrupt, index.add_records,
1783
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
1784
(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
2516
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2517
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent']),
2518
('tip', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
1785
2519
self.assertEqual([], self.caught_entries)
1787
def make_g_index_missing_compression_parent(self):
1788
graph_index = self.make_g_index('missing_comp', 2,
1789
[(('tip', ), ' 100 78',
1790
([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
1793
def make_g_index_missing_parent(self):
1794
graph_index = self.make_g_index('missing_parent', 2,
1795
[(('parent', ), ' 100 78', ([], [])),
1796
(('tip', ), ' 100 78',
1797
([('parent', ), ('missing-parent', )], [('parent', )])),
1801
def make_g_index_no_external_refs(self):
1802
graph_index = self.make_g_index('no_external_refs', 2,
1803
[(('rev', ), ' 100 78',
1804
([('parent', ), ('ghost', )], []))])
1807
def test_add_good_unvalidated_index(self):
1808
unvalidated = self.make_g_index_no_external_refs()
1809
combined = CombinedGraphIndex([unvalidated])
1810
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1811
index.scan_unvalidated_index(unvalidated)
1812
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1814
def test_add_missing_compression_parent_unvalidated_index(self):
1815
unvalidated = self.make_g_index_missing_compression_parent()
1816
combined = CombinedGraphIndex([unvalidated])
1817
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1818
index.scan_unvalidated_index(unvalidated)
1819
# This also checks that its only the compression parent that is
1820
# examined, otherwise 'ghost' would also be reported as a missing
1823
frozenset([('missing-parent',)]),
1824
index.get_missing_compression_parents())
1826
def test_add_missing_noncompression_parent_unvalidated_index(self):
1827
unvalidated = self.make_g_index_missing_parent()
1828
combined = CombinedGraphIndex([unvalidated])
1829
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1830
track_external_parent_refs=True)
1831
index.scan_unvalidated_index(unvalidated)
1833
frozenset([('missing-parent',)]), index.get_missing_parents())
1835
def test_track_external_parent_refs(self):
1836
g_index = self.make_g_index('empty', 2, [])
1837
combined = CombinedGraphIndex([g_index])
1838
index = _KnitGraphIndex(combined, lambda: True, deltas=True,
1839
add_callback=self.catch_add, track_external_parent_refs=True)
1840
self.caught_entries = []
1842
(('new-key',), 'fulltext,no-eol', (None, 50, 60),
1843
[('parent-1',), ('parent-2',)])])
1845
frozenset([('parent-1',), ('parent-2',)]),
1846
index.get_missing_parents())
1848
def test_add_unvalidated_index_with_present_external_references(self):
1849
index = self.two_graph_index(deltas=True)
1850
# Ugly hack to get at one of the underlying GraphIndex objects that
1851
# two_graph_index built.
1852
unvalidated = index._graph_index._indices[1]
1853
# 'parent' is an external ref of _indices[1] (unvalidated), but is
1854
# present in _indices[0].
1855
index.scan_unvalidated_index(unvalidated)
1856
self.assertEqual(frozenset(), index.get_missing_compression_parents())
1858
def make_new_missing_parent_g_index(self, name):
1859
missing_parent = name + '-missing-parent'
1860
graph_index = self.make_g_index(name, 2,
1861
[((name + 'tip', ), ' 100 78',
1862
([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
1865
def test_add_mulitiple_unvalidated_indices_with_missing_parents(self):
1866
g_index_1 = self.make_new_missing_parent_g_index('one')
1867
g_index_2 = self.make_new_missing_parent_g_index('two')
1868
combined = CombinedGraphIndex([g_index_1, g_index_2])
1869
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1870
index.scan_unvalidated_index(g_index_1)
1871
index.scan_unvalidated_index(g_index_2)
1873
frozenset([('one-missing-parent',), ('two-missing-parent',)]),
1874
index.get_missing_compression_parents())
1876
def test_add_mulitiple_unvalidated_indices_with_mutual_dependencies(self):
1877
graph_index_a = self.make_g_index('one', 2,
1878
[(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
1879
(('child-of-two', ), ' 100 78',
1880
([('parent-two',)], [('parent-two',)]))])
1881
graph_index_b = self.make_g_index('two', 2,
1882
[(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
1883
(('child-of-one', ), ' 100 78',
1884
([('parent-one',)], [('parent-one',)]))])
1885
combined = CombinedGraphIndex([graph_index_a, graph_index_b])
1886
index = _KnitGraphIndex(combined, lambda: True, deltas=True)
1887
index.scan_unvalidated_index(graph_index_a)
1888
index.scan_unvalidated_index(graph_index_b)
1890
frozenset([]), index.get_missing_compression_parents())
2521
def test_iter_parents(self):
2522
index1 = self.make_g_index('1', 1, [
2524
(('r0', ), 'N0 100', ([], )),
2526
(('r1', ), '', ([('r0', )], ))])
2527
index2 = self.make_g_index('2', 1, [
2529
(('r2', ), 'N0 100', ([('r1', ), ('r0', )], )),
2531
combined_index = CombinedGraphIndex([index1, index2])
2532
index = KnitGraphIndex(combined_index)
2534
# cases: each sample data individually:
2535
self.assertEqual(set([('r0', ())]),
2536
set(index.iter_parents(['r0'])))
2537
self.assertEqual(set([('r1', ('r0', ))]),
2538
set(index.iter_parents(['r1'])))
2539
self.assertEqual(set([('r2', ('r1', 'r0'))]),
2540
set(index.iter_parents(['r2'])))
2541
# no nodes returned for a missing node
2542
self.assertEqual(set(),
2543
set(index.iter_parents(['missing'])))
2544
# 1 node returned with missing nodes skipped
2545
self.assertEqual(set([('r1', ('r0', ))]),
2546
set(index.iter_parents(['ghost1', 'r1', 'ghost'])))
2548
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2549
set(index.iter_parents(['r0', 'r1'])))
2550
# 2 nodes returned, missing skipped
2551
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
2552
set(index.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
1893
2555
class TestNoParentsGraphIndexKnit(KnitTests):
1894
"""Tests for knits using _KnitGraphIndex with no parents."""
2556
"""Tests for knits using KnitGraphIndex with no parents."""
1896
2558
def make_g_index(self, name, ref_lists=0, nodes=[]):
1897
2559
builder = GraphIndexBuilder(ref_lists)
1935
2589
add_callback = self.catch_add
1937
2591
add_callback = None
1938
return _KnitGraphIndex(combined_index, lambda:True, parents=False,
2592
return KnitGraphIndex(combined_index, parents=False,
1939
2593
add_callback=add_callback)
1941
def test_keys(self):
1942
index = self.two_graph_index()
1943
self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
2595
def test_get_graph(self):
2596
index = self.two_graph_index()
2597
self.assertEqual(set([
2602
]), set(index.get_graph()))
2604
def test_get_ancestry(self):
2605
# with no parents, ancestry is always just the key.
2606
index = self.two_graph_index()
2607
self.assertEqual([], index.get_ancestry([]))
2608
self.assertEqual(['separate'], index.get_ancestry(['separate']))
2609
self.assertEqual(['tail'], index.get_ancestry(['tail']))
2610
self.assertEqual(['parent'], index.get_ancestry(['parent']))
2611
self.assertEqual(['tip'], index.get_ancestry(['tip']))
2612
self.assertTrue(index.get_ancestry(['tip', 'separate']) in
2613
(['tip', 'separate'],
2614
['separate', 'tip'],
2616
# asking for a ghost makes it go boom.
2617
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry, ['ghost'])
2619
def test_get_ancestry_with_ghosts(self):
2620
index = self.two_graph_index()
2621
self.assertEqual([], index.get_ancestry_with_ghosts([]))
2622
self.assertEqual(['separate'], index.get_ancestry_with_ghosts(['separate']))
2623
self.assertEqual(['tail'], index.get_ancestry_with_ghosts(['tail']))
2624
self.assertEqual(['parent'], index.get_ancestry_with_ghosts(['parent']))
2625
self.assertEqual(['tip'], index.get_ancestry_with_ghosts(['tip']))
2626
self.assertTrue(index.get_ancestry_with_ghosts(['tip', 'separate']) in
2627
(['tip', 'separate'],
2628
['separate', 'tip'],
2630
# asking for a ghost makes it go boom.
2631
self.assertRaises(errors.RevisionNotPresent, index.get_ancestry_with_ghosts, ['ghost'])
2633
def test_num_versions(self):
2634
index = self.two_graph_index()
2635
self.assertEqual(4, index.num_versions())
2637
def test_get_versions(self):
2638
index = self.two_graph_index()
2639
self.assertEqual(set(['tail', 'tip', 'parent', 'separate']),
2640
set(index.get_versions()))
2642
def test_has_version(self):
2643
index = self.two_graph_index()
2644
self.assertTrue(index.has_version('tail'))
2645
self.assertFalse(index.has_version('ghost'))
1946
2647
def test_get_position(self):
1947
2648
index = self.two_graph_index()
1948
self.assertEqual((index._graph_index._indices[0], 0, 100),
1949
index.get_position(('tip',)))
1950
self.assertEqual((index._graph_index._indices[1], 100, 78),
1951
index.get_position(('parent',)))
2649
self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position('tip'))
2650
self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position('parent'))
1953
2652
def test_get_method(self):
1954
2653
index = self.two_graph_index()
1955
self.assertEqual('fulltext', index.get_method(('tip',)))
1956
self.assertEqual(['fulltext'], index.get_options(('parent',)))
2654
self.assertEqual('fulltext', index.get_method('tip'))
2655
self.assertEqual(['fulltext'], index.get_options('parent'))
1958
2657
def test_get_options(self):
1959
2658
index = self.two_graph_index()
1960
self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
1961
self.assertEqual(['fulltext'], index.get_options(('parent',)))
1963
def test_get_parent_map(self):
1964
index = self.two_graph_index()
1965
self.assertEqual({('parent',):None},
1966
index.get_parent_map([('parent',), ('ghost',)]))
2659
self.assertEqual(['fulltext', 'no-eol'], index.get_options('tip'))
2660
self.assertEqual(['fulltext'], index.get_options('parent'))
2662
def test_get_parents(self):
2663
index = self.two_graph_index()
2664
self.assertEqual((), index.get_parents('parent'))
2665
# and errors on ghosts.
2666
self.assertRaises(errors.RevisionNotPresent,
2667
index.get_parents, 'ghost')
2669
def test_get_parents_with_ghosts(self):
2670
index = self.two_graph_index()
2671
self.assertEqual((), index.get_parents_with_ghosts('parent'))
2672
# and errors on ghosts.
2673
self.assertRaises(errors.RevisionNotPresent,
2674
index.get_parents_with_ghosts, 'ghost')
2676
def test_check_versions_present(self):
2677
index = self.two_graph_index()
2678
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2680
self.assertRaises(RevisionNotPresent, index.check_versions_present,
2681
['tail', 'missing'])
2682
index.check_versions_present(['tail', 'separate'])
1968
2684
def catch_add(self, entries):
1969
2685
self.caught_entries.append(entries)
1971
2687
def test_add_no_callback_errors(self):
1972
2688
index = self.two_graph_index()
1973
self.assertRaises(errors.ReadOnlyError, index.add_records,
1974
[(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)])])
2689
self.assertRaises(errors.ReadOnlyError, index.add_version,
2690
'new', 'fulltext,no-eol', (None, 50, 60), ['separate'])
1976
2692
def test_add_version_smoke(self):
1977
2693
index = self.two_graph_index(catch_adds=True)
1978
index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60), [])])
2694
index.add_version('new', 'fulltext,no-eol', (None, 50, 60), [])
1979
2695
self.assertEqual([[(('new', ), 'N50 60')]],
1980
2696
self.caught_entries)
1982
2698
def test_add_version_delta_not_delta_index(self):
1983
2699
index = self.two_graph_index(catch_adds=True)
1984
self.assertRaises(errors.KnitCorrupt, index.add_records,
1985
[(('new',), 'no-eol,line-delta', (None, 0, 100), [])])
2700
self.assertRaises(errors.KnitCorrupt, index.add_version,
2701
'new', 'no-eol,line-delta', (None, 0, 100), [])
1986
2702
self.assertEqual([], self.caught_entries)
1988
2704
def test_add_version_same_dup(self):
1989
2705
index = self.two_graph_index(catch_adds=True)
1990
2706
# options can be spelt two different ways
1991
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
1992
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
1993
# position/length are ignored (because each pack could have fulltext or
1994
# delta, and be at a different position.
1995
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
1996
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2707
index.add_version('tip', 'fulltext,no-eol', (None, 0, 100), [])
2708
index.add_version('tip', 'no-eol,fulltext', (None, 0, 100), [])
1997
2709
# but neither should have added data.
1998
self.assertEqual([[], [], [], []], self.caught_entries)
2710
self.assertEqual([[], []], self.caught_entries)
2000
2712
def test_add_version_different_dup(self):
2001
2713
index = self.two_graph_index(catch_adds=True)
2002
2714
# change options
2003
self.assertRaises(errors.KnitCorrupt, index.add_records,
2004
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2005
self.assertRaises(errors.KnitCorrupt, index.add_records,
2006
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
2007
self.assertRaises(errors.KnitCorrupt, index.add_records,
2008
[(('tip',), 'fulltext', (None, 0, 100), [])])
2715
self.assertRaises(errors.KnitCorrupt, index.add_version,
2716
'tip', 'no-eol,line-delta', (None, 0, 100), [])
2717
self.assertRaises(errors.KnitCorrupt, index.add_version,
2718
'tip', 'line-delta,no-eol', (None, 0, 100), [])
2719
self.assertRaises(errors.KnitCorrupt, index.add_version,
2720
'tip', 'fulltext', (None, 0, 100), [])
2722
self.assertRaises(errors.KnitCorrupt, index.add_version,
2723
'tip', 'fulltext,no-eol', (None, 50, 100), [])
2724
self.assertRaises(errors.KnitCorrupt, index.add_version,
2725
'tip', 'fulltext,no-eol', (None, 0, 1000), [])
2010
self.assertRaises(errors.KnitCorrupt, index.add_records,
2011
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2727
self.assertRaises(errors.KnitCorrupt, index.add_version,
2728
'tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])
2012
2729
self.assertEqual([], self.caught_entries)
2014
2731
def test_add_versions(self):
2015
2732
index = self.two_graph_index(catch_adds=True)
2017
(('new',), 'fulltext,no-eol', (None, 50, 60), []),
2018
(('new2',), 'fulltext', (None, 0, 6), []),
2733
index.add_versions([
2734
('new', 'fulltext,no-eol', (None, 50, 60), []),
2735
('new2', 'fulltext', (None, 0, 6), []),
2020
2737
self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
2021
2738
sorted(self.caught_entries[0]))
2024
2741
def test_add_versions_delta_not_delta_index(self):
2025
2742
index = self.two_graph_index(catch_adds=True)
2026
self.assertRaises(errors.KnitCorrupt, index.add_records,
2027
[(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
2743
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2744
[('new', 'no-eol,line-delta', (None, 0, 100), ['parent'])])
2028
2745
self.assertEqual([], self.caught_entries)
2030
2747
def test_add_versions_parents_not_parents_index(self):
2031
2748
index = self.two_graph_index(catch_adds=True)
2032
self.assertRaises(errors.KnitCorrupt, index.add_records,
2033
[(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
2749
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2750
[('new', 'no-eol,fulltext', (None, 0, 100), ['parent'])])
2034
2751
self.assertEqual([], self.caught_entries)
2036
2753
def test_add_versions_random_id_accepted(self):
2037
2754
index = self.two_graph_index(catch_adds=True)
2038
index.add_records([], random_id=True)
2755
index.add_versions([], random_id=True)
2040
2757
def test_add_versions_same_dup(self):
2041
2758
index = self.two_graph_index(catch_adds=True)
2042
2759
# options can be spelt two different ways
2043
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
2044
index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
2045
# position/length are ignored (because each pack could have fulltext or
2046
# delta, and be at a different position.
2047
index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
2048
index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
2760
index.add_versions([('tip', 'fulltext,no-eol', (None, 0, 100), [])])
2761
index.add_versions([('tip', 'no-eol,fulltext', (None, 0, 100), [])])
2049
2762
# but neither should have added data.
2050
self.assertEqual([[], [], [], []], self.caught_entries)
2763
self.assertEqual([[], []], self.caught_entries)
2052
2765
def test_add_versions_different_dup(self):
2053
2766
index = self.two_graph_index(catch_adds=True)
2054
2767
# change options
2055
self.assertRaises(errors.KnitCorrupt, index.add_records,
2056
[(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2057
self.assertRaises(errors.KnitCorrupt, index.add_records,
2058
[(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
2059
self.assertRaises(errors.KnitCorrupt, index.add_records,
2060
[(('tip',), 'fulltext', (None, 0, 100), [])])
2768
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2769
[('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2770
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2771
[('tip', 'line-delta,no-eol', (None, 0, 100), [])])
2772
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2773
[('tip', 'fulltext', (None, 0, 100), [])])
2775
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2776
[('tip', 'fulltext,no-eol', (None, 50, 100), [])])
2777
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2778
[('tip', 'fulltext,no-eol', (None, 0, 1000), [])])
2062
self.assertRaises(errors.KnitCorrupt, index.add_records,
2063
[(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
2780
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2781
[('tip', 'fulltext,no-eol', (None, 0, 100), ['parent'])])
2064
2782
# change options in the second record
2065
self.assertRaises(errors.KnitCorrupt, index.add_records,
2066
[(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
2067
(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
2783
self.assertRaises(errors.KnitCorrupt, index.add_versions,
2784
[('tip', 'fulltext,no-eol', (None, 0, 100), []),
2785
('tip', 'no-eol,line-delta', (None, 0, 100), [])])
2068
2786
self.assertEqual([], self.caught_entries)
2071
class TestKnitVersionedFiles(KnitTests):
2073
def assertGroupKeysForIo(self, exp_groups, keys, non_local_keys,
2074
positions, _min_buffer_size=None):
2075
kvf = self.make_test_knit()
2076
if _min_buffer_size is None:
2077
_min_buffer_size = knit._STREAM_MIN_BUFFER_SIZE
2078
self.assertEqual(exp_groups, kvf._group_keys_for_io(keys,
2079
non_local_keys, positions,
2080
_min_buffer_size=_min_buffer_size))
2082
def assertSplitByPrefix(self, expected_map, expected_prefix_order,
2084
split, prefix_order = KnitVersionedFiles._split_by_prefix(keys)
2085
self.assertEqual(expected_map, split)
2086
self.assertEqual(expected_prefix_order, prefix_order)
2088
def test__group_keys_for_io(self):
2089
ft_detail = ('fulltext', False)
2090
ld_detail = ('line-delta', False)
2098
f_a: (ft_detail, (f_a, 0, 100), None),
2099
f_b: (ld_detail, (f_b, 100, 21), f_a),
2100
f_c: (ld_detail, (f_c, 180, 15), f_b),
2101
g_a: (ft_detail, (g_a, 121, 35), None),
2102
g_b: (ld_detail, (g_b, 156, 12), g_a),
2103
g_c: (ld_detail, (g_c, 195, 13), g_a),
2105
self.assertGroupKeysForIo([([f_a], set())],
2106
[f_a], [], positions)
2107
self.assertGroupKeysForIo([([f_a], set([f_a]))],
2108
[f_a], [f_a], positions)
2109
self.assertGroupKeysForIo([([f_a, f_b], set([]))],
2110
[f_a, f_b], [], positions)
2111
self.assertGroupKeysForIo([([f_a, f_b], set([f_b]))],
2112
[f_a, f_b], [f_b], positions)
2113
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2114
[f_a, g_a, f_b, g_b], [], positions)
2115
self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
2116
[f_a, g_a, f_b, g_b], [], positions,
2117
_min_buffer_size=150)
2118
self.assertGroupKeysForIo([([f_a, f_b], set()), ([g_a, g_b], set())],
2119
[f_a, g_a, f_b, g_b], [], positions,
2120
_min_buffer_size=100)
2121
self.assertGroupKeysForIo([([f_c], set()), ([g_b], set())],
2122
[f_c, g_b], [], positions,
2123
_min_buffer_size=125)
2124
self.assertGroupKeysForIo([([g_b, f_c], set())],
2125
[g_b, f_c], [], positions,
2126
_min_buffer_size=125)
2128
def test__split_by_prefix(self):
2129
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2130
'g': [('g', 'b'), ('g', 'a')],
2132
[('f', 'a'), ('g', 'b'),
2133
('g', 'a'), ('f', 'b')])
2135
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2136
'g': [('g', 'b'), ('g', 'a')],
2138
[('f', 'a'), ('f', 'b'),
2139
('g', 'b'), ('g', 'a')])
2141
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2142
'g': [('g', 'b'), ('g', 'a')],
2144
[('f', 'a'), ('f', 'b'),
2145
('g', 'b'), ('g', 'a')])
2147
self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
2148
'g': [('g', 'b'), ('g', 'a')],
2149
'': [('a',), ('b',)]
2151
[('f', 'a'), ('g', 'b'),
2153
('g', 'a'), ('f', 'b')])
2156
class TestStacking(KnitTests):
2158
def get_basis_and_test_knit(self):
2159
basis = self.make_test_knit(name='basis')
2160
basis = RecordingVersionedFilesDecorator(basis)
2161
test = self.make_test_knit(name='test')
2162
test.add_fallback_versioned_files(basis)
2165
def test_add_fallback_versioned_files(self):
2166
basis = self.make_test_knit(name='basis')
2167
test = self.make_test_knit(name='test')
2168
# It must not error; other tests test that the fallback is referred to
2169
# when accessing data.
2170
test.add_fallback_versioned_files(basis)
2172
def test_add_lines(self):
2173
# lines added to the test are not added to the basis
2174
basis, test = self.get_basis_and_test_knit()
2176
key_basis = ('bar',)
2177
key_cross_border = ('quux',)
2178
key_delta = ('zaphod',)
2179
test.add_lines(key, (), ['foo\n'])
2180
self.assertEqual({}, basis.get_parent_map([key]))
2181
# lines added to the test that reference across the stack do a
2183
basis.add_lines(key_basis, (), ['foo\n'])
2185
test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
2186
self.assertEqual('fulltext', test._index.get_method(key_cross_border))
2187
# we don't even need to look at the basis to see that this should be
2188
# stored as a fulltext
2189
self.assertEqual([], basis.calls)
2190
# Subsequent adds do delta.
2192
test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
2193
self.assertEqual('line-delta', test._index.get_method(key_delta))
2194
self.assertEqual([], basis.calls)
2196
def test_annotate(self):
2197
# annotations from the test knit are answered without asking the basis
2198
basis, test = self.get_basis_and_test_knit()
2200
key_basis = ('bar',)
2201
key_missing = ('missing',)
2202
test.add_lines(key, (), ['foo\n'])
2203
details = test.annotate(key)
2204
self.assertEqual([(key, 'foo\n')], details)
2205
self.assertEqual([], basis.calls)
2206
# But texts that are not in the test knit are looked for in the basis
2208
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2210
details = test.annotate(key_basis)
2211
self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
2212
# Not optimised to date:
2213
# self.assertEqual([("annotate", key_basis)], basis.calls)
2214
self.assertEqual([('get_parent_map', set([key_basis])),
2215
('get_parent_map', set([key_basis])),
2216
('get_record_stream', [key_basis], 'topological', True)],
2219
def test_check(self):
2220
# At the moment checking a stacked knit does implicitly check the
2222
basis, test = self.get_basis_and_test_knit()
2225
def test_get_parent_map(self):
2226
# parents in the test knit are answered without asking the basis
2227
basis, test = self.get_basis_and_test_knit()
2229
key_basis = ('bar',)
2230
key_missing = ('missing',)
2231
test.add_lines(key, (), [])
2232
parent_map = test.get_parent_map([key])
2233
self.assertEqual({key: ()}, parent_map)
2234
self.assertEqual([], basis.calls)
2235
# But parents that are not in the test knit are looked for in the basis
2236
basis.add_lines(key_basis, (), [])
2238
parent_map = test.get_parent_map([key, key_basis, key_missing])
2239
self.assertEqual({key: (),
2240
key_basis: ()}, parent_map)
2241
self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],
2244
def test_get_record_stream_unordered_fulltexts(self):
2245
# records from the test knit are answered without asking the basis:
2246
basis, test = self.get_basis_and_test_knit()
2248
key_basis = ('bar',)
2249
key_missing = ('missing',)
2250
test.add_lines(key, (), ['foo\n'])
2251
records = list(test.get_record_stream([key], 'unordered', True))
2252
self.assertEqual(1, len(records))
2253
self.assertEqual([], basis.calls)
2254
# Missing (from test knit) objects are retrieved from the basis:
2255
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2257
records = list(test.get_record_stream([key_basis, key_missing],
2259
self.assertEqual(2, len(records))
2260
calls = list(basis.calls)
2261
for record in records:
2262
self.assertSubset([record.key], (key_basis, key_missing))
2263
if record.key == key_missing:
2264
self.assertIsInstance(record, AbsentContentFactory)
2266
reference = list(basis.get_record_stream([key_basis],
2267
'unordered', True))[0]
2268
self.assertEqual(reference.key, record.key)
2269
self.assertEqual(reference.sha1, record.sha1)
2270
self.assertEqual(reference.storage_kind, record.storage_kind)
2271
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2272
record.get_bytes_as(record.storage_kind))
2273
self.assertEqual(reference.get_bytes_as('fulltext'),
2274
record.get_bytes_as('fulltext'))
2275
# It's not strictly minimal, but it seems reasonable for now for it to
2276
# ask which fallbacks have which parents.
2278
("get_parent_map", set([key_basis, key_missing])),
2279
("get_record_stream", [key_basis], 'unordered', True)],
2282
def test_get_record_stream_ordered_fulltexts(self):
2283
# ordering is preserved down into the fallback store.
2284
basis, test = self.get_basis_and_test_knit()
2286
key_basis = ('bar',)
2287
key_basis_2 = ('quux',)
2288
key_missing = ('missing',)
2289
test.add_lines(key, (key_basis,), ['foo\n'])
2290
# Missing (from test knit) objects are retrieved from the basis:
2291
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2292
basis.add_lines(key_basis_2, (), ['quux\n'])
2294
# ask for in non-topological order
2295
records = list(test.get_record_stream(
2296
[key, key_basis, key_missing, key_basis_2], 'topological', True))
2297
self.assertEqual(4, len(records))
2299
for record in records:
2300
self.assertSubset([record.key],
2301
(key_basis, key_missing, key_basis_2, key))
2302
if record.key == key_missing:
2303
self.assertIsInstance(record, AbsentContentFactory)
2305
results.append((record.key, record.sha1, record.storage_kind,
2306
record.get_bytes_as('fulltext')))
2307
calls = list(basis.calls)
2308
order = [record[0] for record in results]
2309
self.assertEqual([key_basis_2, key_basis, key], order)
2310
for result in results:
2311
if result[0] == key:
2315
record = source.get_record_stream([result[0]], 'unordered',
2317
self.assertEqual(record.key, result[0])
2318
self.assertEqual(record.sha1, result[1])
2319
# We used to check that the storage kind matched, but actually it
2320
# depends on whether it was sourced from the basis, or in a single
2321
# group, because asking for full texts returns proxy objects to a
2322
# _ContentMapGenerator object; so checking the kind is unneeded.
2323
self.assertEqual(record.get_bytes_as('fulltext'), result[3])
2324
# It's not strictly minimal, but it seems reasonable for now for it to
2325
# ask which fallbacks have which parents.
2327
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2328
# topological is requested from the fallback, because that is what
2329
# was requested at the top level.
2330
("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
2333
def test_get_record_stream_unordered_deltas(self):
2334
# records from the test knit are answered without asking the basis:
2335
basis, test = self.get_basis_and_test_knit()
2337
key_basis = ('bar',)
2338
key_missing = ('missing',)
2339
test.add_lines(key, (), ['foo\n'])
2340
records = list(test.get_record_stream([key], 'unordered', False))
2341
self.assertEqual(1, len(records))
2342
self.assertEqual([], basis.calls)
2343
# Missing (from test knit) objects are retrieved from the basis:
2344
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2346
records = list(test.get_record_stream([key_basis, key_missing],
2347
'unordered', False))
2348
self.assertEqual(2, len(records))
2349
calls = list(basis.calls)
2350
for record in records:
2351
self.assertSubset([record.key], (key_basis, key_missing))
2352
if record.key == key_missing:
2353
self.assertIsInstance(record, AbsentContentFactory)
2355
reference = list(basis.get_record_stream([key_basis],
2356
'unordered', False))[0]
2357
self.assertEqual(reference.key, record.key)
2358
self.assertEqual(reference.sha1, record.sha1)
2359
self.assertEqual(reference.storage_kind, record.storage_kind)
2360
self.assertEqual(reference.get_bytes_as(reference.storage_kind),
2361
record.get_bytes_as(record.storage_kind))
2362
# It's not strictly minimal, but it seems reasonable for now for it to
2363
# ask which fallbacks have which parents.
2365
("get_parent_map", set([key_basis, key_missing])),
2366
("get_record_stream", [key_basis], 'unordered', False)],
2369
def test_get_record_stream_ordered_deltas(self):
2370
# ordering is preserved down into the fallback store.
2371
basis, test = self.get_basis_and_test_knit()
2373
key_basis = ('bar',)
2374
key_basis_2 = ('quux',)
2375
key_missing = ('missing',)
2376
test.add_lines(key, (key_basis,), ['foo\n'])
2377
# Missing (from test knit) objects are retrieved from the basis:
2378
basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
2379
basis.add_lines(key_basis_2, (), ['quux\n'])
2381
# ask for in non-topological order
2382
records = list(test.get_record_stream(
2383
[key, key_basis, key_missing, key_basis_2], 'topological', False))
2384
self.assertEqual(4, len(records))
2386
for record in records:
2387
self.assertSubset([record.key],
2388
(key_basis, key_missing, key_basis_2, key))
2389
if record.key == key_missing:
2390
self.assertIsInstance(record, AbsentContentFactory)
2392
results.append((record.key, record.sha1, record.storage_kind,
2393
record.get_bytes_as(record.storage_kind)))
2394
calls = list(basis.calls)
2395
order = [record[0] for record in results]
2396
self.assertEqual([key_basis_2, key_basis, key], order)
2397
for result in results:
2398
if result[0] == key:
2402
record = source.get_record_stream([result[0]], 'unordered',
2404
self.assertEqual(record.key, result[0])
2405
self.assertEqual(record.sha1, result[1])
2406
self.assertEqual(record.storage_kind, result[2])
2407
self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
2408
# It's not strictly minimal, but it seems reasonable for now for it to
2409
# ask which fallbacks have which parents.
2411
("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2412
("get_record_stream", [key_basis_2, key_basis], 'topological', False)],
2415
def test_get_sha1s(self):
2416
# sha1's in the test knit are answered without asking the basis
2417
basis, test = self.get_basis_and_test_knit()
2419
key_basis = ('bar',)
2420
key_missing = ('missing',)
2421
test.add_lines(key, (), ['foo\n'])
2422
key_sha1sum = osutils.sha('foo\n').hexdigest()
2423
sha1s = test.get_sha1s([key])
2424
self.assertEqual({key: key_sha1sum}, sha1s)
2425
self.assertEqual([], basis.calls)
2426
# But texts that are not in the test knit are looked for in the basis
2427
# directly (rather than via text reconstruction) so that remote servers
2428
# etc don't have to answer with full content.
2429
basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2430
basis_sha1sum = osutils.sha('foo\nbar\n').hexdigest()
2432
sha1s = test.get_sha1s([key, key_missing, key_basis])
2433
self.assertEqual({key: key_sha1sum,
2434
key_basis: basis_sha1sum}, sha1s)
2435
self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],
2438
def test_insert_record_stream(self):
2439
# records are inserted as normal; insert_record_stream builds on
2440
# add_lines, so a smoke test should be all that's needed:
2442
key_basis = ('bar',)
2443
key_delta = ('zaphod',)
2444
basis, test = self.get_basis_and_test_knit()
2445
source = self.make_test_knit(name='source')
2446
basis.add_lines(key_basis, (), ['foo\n'])
2448
source.add_lines(key_basis, (), ['foo\n'])
2449
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2450
stream = source.get_record_stream([key_delta], 'unordered', False)
2451
test.insert_record_stream(stream)
2452
# XXX: this does somewhat too many calls in making sure of whether it
2453
# has to recreate the full text.
2454
self.assertEqual([("get_parent_map", set([key_basis])),
2455
('get_parent_map', set([key_basis])),
2456
('get_record_stream', [key_basis], 'unordered', True)],
2458
self.assertEqual({key_delta:(key_basis,)},
2459
test.get_parent_map([key_delta]))
2460
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2461
'unordered', True).next().get_bytes_as('fulltext'))
2463
def test_iter_lines_added_or_present_in_keys(self):
2464
# Lines from the basis are returned, and lines for a given key are only
2468
# all sources are asked for keys:
2469
basis, test = self.get_basis_and_test_knit()
2470
basis.add_lines(key1, (), ["foo"])
2472
lines = list(test.iter_lines_added_or_present_in_keys([key1]))
2473
self.assertEqual([("foo\n", key1)], lines)
2474
self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
2476
# keys in both are not duplicated:
2477
test.add_lines(key2, (), ["bar\n"])
2478
basis.add_lines(key2, (), ["bar\n"])
2480
lines = list(test.iter_lines_added_or_present_in_keys([key2]))
2481
self.assertEqual([("bar\n", key2)], lines)
2482
self.assertEqual([], basis.calls)
2484
def test_keys(self):
2487
# all sources are asked for keys:
2488
basis, test = self.get_basis_and_test_knit()
2490
self.assertEqual(set(), set(keys))
2491
self.assertEqual([("keys",)], basis.calls)
2492
# keys from a basis are returned:
2493
basis.add_lines(key1, (), [])
2496
self.assertEqual(set([key1]), set(keys))
2497
self.assertEqual([("keys",)], basis.calls)
2498
# keys in both are not duplicated:
2499
test.add_lines(key2, (), [])
2500
basis.add_lines(key2, (), [])
2503
self.assertEqual(2, len(keys))
2504
self.assertEqual(set([key1, key2]), set(keys))
2505
self.assertEqual([("keys",)], basis.calls)
2507
def test_add_mpdiffs(self):
2508
# records are inserted as normal; add_mpdiff builds on
2509
# add_lines, so a smoke test should be all that's needed:
2511
key_basis = ('bar',)
2512
key_delta = ('zaphod',)
2513
basis, test = self.get_basis_and_test_knit()
2514
source = self.make_test_knit(name='source')
2515
basis.add_lines(key_basis, (), ['foo\n'])
2517
source.add_lines(key_basis, (), ['foo\n'])
2518
source.add_lines(key_delta, (key_basis,), ['bar\n'])
2519
diffs = source.make_mpdiffs([key_delta])
2520
test.add_mpdiffs([(key_delta, (key_basis,),
2521
source.get_sha1s([key_delta])[key_delta], diffs[0])])
2522
self.assertEqual([("get_parent_map", set([key_basis])),
2523
('get_record_stream', [key_basis], 'unordered', True),],
2525
self.assertEqual({key_delta:(key_basis,)},
2526
test.get_parent_map([key_delta]))
2527
self.assertEqual('bar\n', test.get_record_stream([key_delta],
2528
'unordered', True).next().get_bytes_as('fulltext'))
2530
def test_make_mpdiffs(self):
2531
# Generating an mpdiff across a stacking boundary should detect parent
2535
key_right = ('zaphod',)
2536
basis, test = self.get_basis_and_test_knit()
2537
basis.add_lines(key_left, (), ['bar\n'])
2538
basis.add_lines(key_right, (), ['zaphod\n'])
2540
test.add_lines(key, (key_left, key_right),
2541
['bar\n', 'foo\n', 'zaphod\n'])
2542
diffs = test.make_mpdiffs([key])
2544
multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
2545
multiparent.NewText(['foo\n']),
2546
multiparent.ParentText(1, 0, 2, 1)])],
2548
self.assertEqual(3, len(basis.calls))
2550
("get_parent_map", set([key_left, key_right])),
2551
("get_parent_map", set([key_left, key_right])),
2554
last_call = basis.calls[-1]
2555
self.assertEqual('get_record_stream', last_call[0])
2556
self.assertEqual(set([key_left, key_right]), set(last_call[1]))
2557
self.assertEqual('topological', last_call[2])
2558
self.assertEqual(True, last_call[3])
2561
class TestNetworkBehaviour(KnitTests):
2562
"""Tests for getting data out of/into knits over the network."""
2564
def test_include_delta_closure_generates_a_knit_delta_closure(self):
2565
vf = self.make_test_knit(name='test')
2566
# put in three texts, giving ft, delta, delta
2567
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2568
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2569
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2570
# But heuristics could interfere, so check what happened:
2571
self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
2572
[record.storage_kind for record in
2573
vf.get_record_stream([('base',), ('d1',), ('d2',)],
2574
'topological', False)])
2575
# generate a stream of just the deltas include_delta_closure=True,
2576
# serialise to the network, and check that we get a delta closure on the wire.
2577
stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
2578
netb = [record.get_bytes_as(record.storage_kind) for record in stream]
2579
# The first bytes should be a memo from _ContentMapGenerator, and the
2580
# second bytes should be empty (because its a API proxy not something
2581
# for wire serialisation.
2582
self.assertEqual('', netb[1])
2584
kind, line_end = network_bytes_to_kind_and_offset(bytes)
2585
self.assertEqual('knit-delta-closure', kind)
2588
class TestContentMapGenerator(KnitTests):
2589
"""Tests for ContentMapGenerator"""
2591
def test_get_record_stream_gives_records(self):
2592
vf = self.make_test_knit(name='test')
2593
# put in three texts, giving ft, delta, delta
2594
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2595
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2596
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2597
keys = [('d1',), ('d2',)]
2598
generator = _VFContentMapGenerator(vf, keys,
2599
global_map=vf.get_parent_map(keys))
2600
for record in generator.get_record_stream():
2601
if record.key == ('d1',):
2602
self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
2604
self.assertEqual('d2\n', record.get_bytes_as('fulltext'))
2606
def test_get_record_stream_kinds_are_raw(self):
2607
vf = self.make_test_knit(name='test')
2608
# put in three texts, giving ft, delta, delta
2609
vf.add_lines(('base',), (), ['base\n', 'content\n'])
2610
vf.add_lines(('d1',), (('base',),), ['d1\n'])
2611
vf.add_lines(('d2',), (('d1',),), ['d2\n'])
2612
keys = [('base',), ('d1',), ('d2',)]
2613
generator = _VFContentMapGenerator(vf, keys,
2614
global_map=vf.get_parent_map(keys))
2615
kinds = {('base',): 'knit-delta-closure',
2616
('d1',): 'knit-delta-closure-ref',
2617
('d2',): 'knit-delta-closure-ref',
2619
for record in generator.get_record_stream():
2620
self.assertEqual(kinds[record.key], record.storage_kind)
2788
def test_iter_parents(self):
2789
index = self.two_graph_index()
2790
self.assertEqual(set([
2791
('tip', ()), ('tail', ()), ('parent', ()), ('separate', ())
2793
set(index.iter_parents(['tip', 'tail', 'ghost', 'parent', 'separate'])))
2794
self.assertEqual(set([('tip', ())]),
2795
set(index.iter_parents(['tip'])))
2796
self.assertEqual(set(),
2797
set(index.iter_parents([])))
2800
class TestPackKnits(KnitTests):
2801
"""Tests that use a _PackAccess and KnitGraphIndex."""
2803
def test_get_data_stream_packs_ignores_pack_overhead(self):
2804
# Packs have an encoding overhead that should not be included in the
2805
# 'size' field of a data stream, because it is not returned by the
2806
# raw_reading functions - it is why index_memo's are opaque, and
2807
# get_data_stream was abusing this.
2808
packname = 'test.pack'
2809
transport = self.get_transport()
2810
def write_data(bytes):
2811
transport.append_bytes(packname, bytes)
2812
writer = pack.ContainerWriter(write_data)
2814
index = InMemoryGraphIndex(2)
2815
knit_index = KnitGraphIndex(index, add_callback=index.add_nodes,
2817
indices = {index:(transport, packname)}
2818
access = _PackAccess(indices, writer=(writer, index))
2819
k = KnitVersionedFile('test', get_transport('.'),
2820
delta=True, create=True, index=knit_index, access_method=access)
2821
# insert something into the knit
2822
k.add_lines('text-1', [], ["foo\n"])
2823
# get a data stream for it
2824
stream = k.get_data_stream(['text-1'])
2825
# if the stream has been incorrectly assembled, we will get a short read
2826
# reading from the stream (as streams have no trailer)
2827
expected_length = stream[1][0][2]
2828
# we use -1 to do the read, so that if a trailer is added this test
2829
# will fail and we'll adjust it to handle that case correctly, rather
2830
# than allowing an over-read that is bogus.
2831
self.assertEqual(expected_length, len(stream[2](-1)))
2834
class Test_StreamIndex(KnitTests):
2836
def get_index(self, knit, stream):
2837
"""Get a _StreamIndex from knit and stream."""
2838
return knit._knit_from_datastream(stream)._index
2840
def assertIndexVersions(self, knit, versions):
2841
"""Check that the _StreamIndex versions are those of the stream."""
2842
index = self.get_index(knit, knit.get_data_stream(versions))
2843
self.assertEqual(set(index.get_versions()), set(versions))
2844
# check we didn't get duplicates
2845
self.assertEqual(len(index.get_versions()), len(versions))
2847
def assertIndexAncestry(self, knit, ancestry_versions, versions, result):
2848
"""Check the result of a get_ancestry call on knit."""
2849
index = self.get_index(knit, knit.get_data_stream(versions))
2852
set(index.get_ancestry(ancestry_versions, False)))
2854
def assertIterParents(self, knit, versions, parent_versions, result):
2855
"""Check the result of an iter_parents call on knit."""
2856
index = self.get_index(knit, knit.get_data_stream(versions))
2857
self.assertEqual(result, index.iter_parents(parent_versions))
2859
def assertGetMethod(self, knit, versions, version, result):
2860
index = self.get_index(knit, knit.get_data_stream(versions))
2861
self.assertEqual(result, index.get_method(version))
2863
def assertGetOptions(self, knit, version, options):
2864
index = self.get_index(knit, knit.get_data_stream(version))
2865
self.assertEqual(options, index.get_options(version))
2867
def assertGetPosition(self, knit, versions, version, result):
2868
index = self.get_index(knit, knit.get_data_stream(versions))
2869
if result[1] is None:
2870
result = (result[0], index, result[2], result[3])
2871
self.assertEqual(result, index.get_position(version))
2873
def assertGetParentsWithGhosts(self, knit, versions, version, parents):
2874
index = self.get_index(knit, knit.get_data_stream(versions))
2875
self.assertEqual(parents, index.get_parents_with_ghosts(version))
2877
def make_knit_with_4_versions_2_dags(self):
2878
knit = self.make_test_knit()
2879
knit.add_lines('a', [], ["foo"])
2880
knit.add_lines('b', [], [])
2881
knit.add_lines('c', ['b', 'a'], [])
2882
knit.add_lines_with_ghosts('d', ['e', 'f'], [])
2885
def test_versions(self):
2886
"""The versions of a StreamIndex are those of the datastream."""
2887
knit = self.make_knit_with_4_versions_2_dags()
2888
# ask for most permutations, which catches bugs like falling back to the
2889
# target knit, or showing ghosts, etc.
2890
self.assertIndexVersions(knit, [])
2891
self.assertIndexVersions(knit, ['a'])
2892
self.assertIndexVersions(knit, ['b'])
2893
self.assertIndexVersions(knit, ['c'])
2894
self.assertIndexVersions(knit, ['d'])
2895
self.assertIndexVersions(knit, ['a', 'b'])
2896
self.assertIndexVersions(knit, ['b', 'c'])
2897
self.assertIndexVersions(knit, ['a', 'c'])
2898
self.assertIndexVersions(knit, ['a', 'b', 'c'])
2899
self.assertIndexVersions(knit, ['a', 'b', 'c', 'd'])
2901
def test_construct(self):
2902
"""Constructing a StreamIndex generates index data."""
2903
data_list = [('text-a', ['fulltext'], 127, []),
2904
('text-b', ['option'], 128, ['text-c'])]
2905
index = _StreamIndex(data_list)
2906
self.assertEqual({'text-a':(['fulltext'], (0, 127), []),
2907
'text-b':(['option'], (127, 127 + 128), ['text-c'])},
2910
def test_get_ancestry(self):
2911
knit = self.make_knit_with_4_versions_2_dags()
2912
self.assertIndexAncestry(knit, ['a'], ['a'], ['a'])
2913
self.assertIndexAncestry(knit, ['b'], ['b'], ['b'])
2914
self.assertIndexAncestry(knit, ['c'], ['c'], ['c'])
2915
self.assertIndexAncestry(knit, ['c'], ['a', 'b', 'c'],
2916
set(['a', 'b', 'c']))
2917
self.assertIndexAncestry(knit, ['c', 'd'], ['a', 'b', 'c', 'd'],
2918
set(['a', 'b', 'c', 'd']))
2920
def test_get_method(self):
2921
knit = self.make_knit_with_4_versions_2_dags()
2922
self.assertGetMethod(knit, ['a'], 'a', 'fulltext')
2923
self.assertGetMethod(knit, ['c'], 'c', 'line-delta')
2924
# get_method on a basis that is not in the datastream (but in the
2925
# backing knit) returns 'fulltext', because thats what we'll create as
2927
self.assertGetMethod(knit, ['c'], 'b', 'fulltext')
2929
def test_iter_parents(self):
2930
knit = self.make_knit_with_4_versions_2_dags()
2931
self.assertIterParents(knit, ['a'], ['a'], [('a', [])])
2932
self.assertIterParents(knit, ['a', 'b'], ['a', 'b'],
2933
[('a', []), ('b', [])])
2934
self.assertIterParents(knit, ['a', 'b', 'c'], ['a', 'b', 'c'],
2935
[('a', []), ('b', []), ('c', ['b', 'a'])])
2936
self.assertIterParents(knit, ['a', 'b', 'c', 'd'],
2937
['a', 'b', 'c', 'd'],
2938
[('a', []), ('b', []), ('c', ['b', 'a']), ('d', ['e', 'f'])])
2939
self.assertIterParents(knit, ['c'], ['a', 'b', 'c'],
2940
[('c', ['b', 'a'])])
2942
def test_get_options(self):
2943
knit = self.make_knit_with_4_versions_2_dags()
2944
self.assertGetOptions(knit, 'a', ['no-eol', 'fulltext'])
2945
self.assertGetOptions(knit, 'c', ['line-delta'])
2947
def test_get_parents_with_ghosts(self):
2948
knit = self.make_knit_with_4_versions_2_dags()
2949
self.assertGetParentsWithGhosts(knit, ['a'], 'a', [])
2950
self.assertGetParentsWithGhosts(knit, ['c'], 'c', ['b', 'a'])
2951
self.assertGetParentsWithGhosts(knit, ['d'], 'd', ['e', 'f'])
2953
def test_get_position(self):
2954
knit = self.make_knit_with_4_versions_2_dags()
2955
# get_position returns (thunk_flag, index(can be None), start, end) for
2956
# _StreamAccess to use.
2957
self.assertGetPosition(knit, ['a'], 'a', (False, None, 0, 78))
2958
self.assertGetPosition(knit, ['a', 'c'], 'c', (False, None, 78, 156))
2959
# get_position on a text that is not in the datastream (but in the
2960
# backing knit) returns (True, 'versionid', None, None) - and then the
2961
# access object can construct the relevant data as needed.
2962
self.assertGetPosition(knit, ['a', 'c'], 'b', (True, 'b', None, None))
2965
class Test_StreamAccess(KnitTests):
2967
def get_index_access(self, knit, stream):
2968
"""Get a _StreamAccess from knit and stream."""
2969
knit = knit._knit_from_datastream(stream)
2970
return knit._index, knit._data._access
2972
def assertGetRawRecords(self, knit, versions):
2973
index, access = self.get_index_access(knit,
2974
knit.get_data_stream(versions))
2975
# check that every version asked for can be obtained from the resulting
2979
for version in versions:
2980
memos.append(knit._index.get_position(version))
2982
for version, data in zip(
2983
versions, knit._data._access.get_raw_records(memos)):
2984
original[version] = data
2986
for version in versions:
2987
memos.append(index.get_position(version))
2989
for version, data in zip(versions, access.get_raw_records(memos)):
2990
streamed[version] = data
2991
self.assertEqual(original, streamed)
2993
for version in versions:
2994
data = list(access.get_raw_records(
2995
[index.get_position(version)]))[0]
2996
self.assertEqual(original[version], data)
2998
def make_knit_with_two_versions(self):
2999
knit = self.make_test_knit()
3000
knit.add_lines('a', [], ["foo"])
3001
knit.add_lines('b', [], ["bar"])
3004
def test_get_raw_records(self):
3005
knit = self.make_knit_with_two_versions()
3006
self.assertGetRawRecords(knit, ['a', 'b'])
3007
self.assertGetRawRecords(knit, ['a'])
3008
self.assertGetRawRecords(knit, ['b'])
3010
def test_get_raw_record_from_backing_knit(self):
3011
# the thunk layer should create an artificial A on-demand when needed.
3012
source_knit = self.make_test_knit(name='plain', annotate=False)
3013
target_knit = self.make_test_knit(name='annotated', annotate=True)
3014
source_knit.add_lines("A", [], ["Foo\n"])
3015
# Give the target A, so we can try to thunk across to it.
3016
target_knit.join(source_knit)
3017
index, access = self.get_index_access(target_knit,
3018
source_knit.get_data_stream([]))
3019
raw_data = list(access.get_raw_records([(True, "A", None, None)]))[0]
3020
df = GzipFile(mode='rb', fileobj=StringIO(raw_data))
3022
'version A 1 5d36b88bb697a2d778f024048bafabd443d74503\n'
3026
def test_asking_for_thunk_stream_is_not_plain_errors(self):
3027
knit = self.make_test_knit(name='annotated', annotate=True)
3028
knit.add_lines("A", [], ["Foo\n"])
3029
index, access = self.get_index_access(knit,
3030
knit.get_data_stream([]))
3031
self.assertRaises(errors.KnitCorrupt,
3032
list, access.get_raw_records([(True, "A", None, None)]))