from bzrlib.branchbuilder import BranchBuilder
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
from bzrlib.index import GraphIndex, InMemoryGraphIndex
from bzrlib.repository import RepositoryFormat
from bzrlib.smart import server
from bzrlib.tests import (
    TestCaseWithTransport,
    )
from bzrlib.transport import (
    fakenfs,
    get_transport,
    )
from bzrlib.transport.memory import MemoryServer
from bzrlib import (
    osutils,
    revision as _mod_revision,
    tests,
    versionedfile,
    )
from bzrlib.repofmt import (
    groupcompress_repo,
    knitrepo,
    pack_repo,
    )

        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)
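        # Note (an editorial assumption; the test setup is elided in this
        # fragment): the asserts above only make sense if inv_xml embeds
        # revision_id="other-rev-id", making 'test-rev-id' the mismatching
        # case and 'other-rev-id' the matching one.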

    def test_supports_external_lookups(self):
        # ...
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
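
    # Reviewer note (an assumption about the index API, not taken from this
    # file): get_build_details maps each (file_id, revision) key to a tuple
    # whose first element is an "index memo" describing where the bytes live
    # on disk, so comparing index_memo[:3] for both versions asserts that the
    # fetch placed both texts in the same compression group.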

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()

        # ...
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))
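
    # A minimal sketch of the behaviour the two tests above exercise (an
    # inference from their asserts, not copied from pack_repo.py): every file
    # under obsolete_packs/ is deleted unless its stem is named in `preserve`,
    # and the stems of the files that looked like packs are returned.
    #
    #   def _clear_obsolete_packs_sketch(transport, preserve=frozenset()):
    #       found = []
    #       obsolete_trans = transport.clone('obsolete_packs')
    #       for filename in obsolete_trans.list_dir('.'):
    #           name, ext = osutils.splitext(filename)
    #           if ext == '.pack':
    #               found.append(name)
    #           if name in preserve:
    #               continue
    #           obsolete_trans.delete(filename)
    #       return found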

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        # ...
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
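
    # Note, not a quote from pack_repo.py: the assert above is consistent with
    # a digit-sum policy, at most one pack per decimal digit of the revision
    # count, so 112894 revisions allow 1 + 1 + 2 + 8 + 9 + 4 == 25 packs.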

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))
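
    # Note (an assumption building on the digit-per-pack reading above):
    # pack_distribution(n) appears to describe the target pack sizes for n
    # revisions, e.g. eleven revisions would be held as one pack of ten plus
    # one pack of one; zero revisions degenerate to a single empty bucket.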

        # ...
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
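
    # Reviewer note (inferred from the x[0][0] indexing above, not from
    # pack_repo.py itself): _diff_pack_names appears to yield index nodes of
    # the form ((name,), value), so x[0][0] extracts the pack name. Running
    # the same diff before and after reload_pack_names shows that the pending
    # addition and removal both survive the reload.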

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full pack via the other repo, which will cause us to re-evaluate
        # and decide we don't need to do anything.
        # ...
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""