import sys

from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
from bzrlib.index import GraphIndex
from bzrlib.repository import RepositoryFormat
from bzrlib.tests import (
    TestCaseWithTransport,
    )
from bzrlib.transport import (
    get_transport,
    )
from bzrlib import (
    errors,
    osutils,
    revision as _mod_revision,
    tests,
    versionedfile,
    )
# ...

        tree = control.create_workingtree()
        tree.add(['foo'], ['Foo:Bar'], ['file'])
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
        try:
            tree.commit('first post', rev_id='first')
        except errors.IllegalPath:
            if sys.platform != 'win32':
                raise
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows')
        self.assertEqualDiff(
            '# bzr weave file v5\n'
# ...

        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)
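        # Both calls feed the same inv_xml; only the revision id passed in
        # differs.  A mismatching id currently trips an internal assertion
        # (hence assertRaises(AssertionError, ...)) rather than raising a
        # proper error, which is exactly the behaviour the comment above
        # questions.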
# ...

    def test_supports_external_lookups(self):
        # ...
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
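        # get_build_details maps each key to a tuple whose first element is
        # the index memo; for 2a (groupcompress) packs its first three fields
        # name the pack index and the byte range of the compressed group, so
        # matching prefixes mean both text versions were written into the
        # same group of the same pack.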
# ...

        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
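        # A 2a inventory is a CHKInventory: it stores id_to_entry and
        # parent_id_basename_to_file_id as CHK maps, the latter being the
        # "parent base dict" this test's name refers to.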
# ...

        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository; we don't need to hit disk for this.
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
# ...

        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
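        # _clear_obsolete_packs removes every file under obsolete_packs/,
        # including strays like not-a-pack.rix, but only the stems that had a
        # .pack file are reported in the return value.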

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))
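        # With preserve, the named pack keeps its .pack and index files on
        # disk while still being counted among the returned stems.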

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # ...
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
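        # The policy is roughly one pack per decimal digit of the revision
        # count: 112894 revisions -> 1+1+2+8+9+4 = 25 packs.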

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')
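        # The parentheses in this pattern are unescaped, so they act as regex
        # groups rather than literal parens; the match is effectively
        # 'RepositoryPackCollection.*Repository.*'.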

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are renamed as well
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))
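        # pack_distribution(n) returns the target pack sizes for n revisions
        # under the digit-based policy; zero revisions still get one (empty)
        # pack, matching the "revision free repo" case above.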
# ...

        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        # ...
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
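        # _diff_pack_names compares the in-memory pack list with what is on
        # disk; the fourth element of the returned tuple is not needed by
        # these assertions, hence the throwaway _.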

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))
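        # With _max_pack_count stubbed to always return 1, the autopack run
        # inside commit_write_group() has to combine every pack, including
        # the one the write group just created, into a single new pack, so
        # the fresh pack is obsoleted immediately.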

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # reload.
        # ...
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
def test__save_pack_names(self):
1458
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1459
names = packs.names()
1460
pack = packs.get_pack_by_name(names[0])
1461
packs._remove_pack_from_memory(pack)
1462
packs._save_pack_names(obsolete_packs=[pack])
1463
cur_packs = packs._pack_transport.list_dir('.')
1464
self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1465
# obsolete_packs will also have stuff like .rix and .iix present.
1466
obsolete_packs = packs.transport.list_dir('obsolete_packs')
1467
obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1468
self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))
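        # Because the pack is listed in obsolete_packs, _save_pack_names
        # treats it as already handled, and clear_obsolete_packs leaves its
        # renamed files in place instead of deleting them a second time
        # (compare test__clear_obsolete_packs_preserve above).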


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""