/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Robert Collins
  • Date: 2009-08-26 01:18:13 UTC
  • mto: This revision was merged to the branch mainline in revision 4656.
  • Revision ID: robertc@robertcollins.net-20090826011813-46x8kcuzwz97opoi
Deserialise IncompatibleRepositories errors in the client, generating
nicer feedback when the smart server encounters such a situation.
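
Note: the change this message describes lives in the smart client (bzrlib/remote.py at the time), not in the test file diffed below. As illustration only, a minimal sketch of such a translation, assuming the failure reaches the client as an ErrorFromSmartServer whose error_verb/error_args carry ('IncompatibleRepositories', source, target, details); the helper name is hypothetical, not the committed code:

    from bzrlib import errors

    def _translate_incompatible_repositories(err):
        # err: an ErrorFromSmartServer raised while talking to the smart
        # server. Rebuild the rich local exception so the user gets the
        # normal IncompatibleRepositories message rather than a raw
        # error tuple. (error_verb/error_args wiring is an assumption.)
        if err.error_verb == 'IncompatibleRepositories':
            raise errors.IncompatibleRepositories(
                err.error_args[0], err.error_args[1], err.error_args[2])
        raise err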

=== modified file 'bzrlib/tests/test_repository.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -23,10 +23,11 @@
 """
 
 from stat import S_ISDIR
-import sys
+from StringIO import StringIO
 
 import bzrlib
-from bzrlib.errors import (NoSuchFile,
+from bzrlib.errors import (NotBranchError,
+                           NoSuchFile,
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
@@ -34,25 +35,33 @@
     graph,
     tests,
     )
+from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex
+from bzrlib.index import GraphIndex, InMemoryGraphIndex
 from bzrlib.repository import RepositoryFormat
+from bzrlib.smart import server
 from bzrlib.tests import (
     TestCase,
     TestCaseWithTransport,
+    TestSkipped,
+    test_knit,
     )
 from bzrlib.transport import (
+    fakenfs,
     get_transport,
     )
+from bzrlib.transport.memory import MemoryServer
 from bzrlib import (
+    bencode,
     bzrdir,
     errors,
     inventory,
     osutils,
+    progress,
     repository,
     revision as _mod_revision,
+    symbol_versioning,
     upgrade,
-    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (
@@ -243,14 +252,7 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        try:
-            tree.commit('first post', rev_id='first')
-        except errors.IllegalPath:
-            if sys.platform != 'win32':
-                raise
-            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
-                              ' in repo format 7')
-            return
+        tree.commit('first post', rev_id='first')
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
@@ -454,7 +456,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -466,9 +468,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo._deserialise_inventory,
+        self.assertRaises(AssertionError, repo.deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -679,84 +681,15 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(tests.TestCaseWithMemoryTransport):
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+class Test2a(TestCaseWithTransport):
 
     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_memory_tree('repo', format="2a")
-        tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('repo', format="2a")
         revid = tree.commit("foo")
-        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -771,19 +704,12 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        # Use a memory backed repository, we don't need to hit disk for this
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('tree', format='2a')
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('tree', format='2a')
        # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -1028,7 +954,6 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -1069,7 +994,6 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
-        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -1133,31 +1057,6 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
-    def test__clear_obsolete_packs(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs()
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
-
-    def test__clear_obsolete_packs_preserve(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
-                         sorted(obsolete_pack_trans.list_dir('.')))
-
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
@@ -1183,33 +1082,6 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
-    def test_repr(self):
-        packs = self.get_packs()
-        self.assertContainsRe(repr(packs),
-            'RepositoryPackCollection(.*Repository(.*))')
-
-    def test__obsolete_packs(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        # Schedule this one for removal
-        packs._remove_pack_from_memory(pack)
-        # Simulate a concurrent update by renaming the .pack file and one of
-        # the indices
-        packs.transport.rename('packs/%s.pack' % (names[0],),
-                               'obsolete_packs/%s.pack' % (names[0],))
-        packs.transport.rename('indices/%s.iix' % (names[0],),
-                               'obsolete_packs/%s.iix' % (names[0],))
-        # Now trigger the obsoletion, and ensure that all the remaining files
-        # are still renamed
-        packs._obsolete_packs([pack])
-        self.assertEqual([n + '.pack' for n in names[1:]],
-                         sorted(packs._pack_transport.list_dir('.')))
-        # names[0] should not be present in the index anymore
-        self.assertEqual(names[1:],
-            sorted(set([osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')])))
-
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
1255
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1384
1256
        self.assertFalse(packs.reload_pack_names())
1385
1257
 
1386
 
    def test_reload_pack_names_preserves_pending(self):
1387
 
        # TODO: Update this to also test for pending-deleted names
1388
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1389
 
        # We will add one pack (via start_write_group + insert_record_stream),
1390
 
        # and remove another pack (via _remove_pack_from_memory)
1391
 
        orig_names = packs.names()
1392
 
        orig_at_load = packs._packs_at_load
1393
 
        to_remove_name = iter(orig_names).next()
1394
 
        r.start_write_group()
1395
 
        self.addCleanup(r.abort_write_group)
1396
 
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1397
 
            ('text', 'rev'), (), None, 'content\n')])
1398
 
        new_pack = packs._new_pack
1399
 
        self.assertTrue(new_pack.data_inserted())
1400
 
        new_pack.finish()
1401
 
        packs.allocate(new_pack)
1402
 
        packs._new_pack = None
1403
 
        removed_pack = packs.get_pack_by_name(to_remove_name)
1404
 
        packs._remove_pack_from_memory(removed_pack)
1405
 
        names = packs.names()
1406
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1407
 
        new_names = set([x[0][0] for x in new_nodes])
1408
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1409
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1410
 
        self.assertEqual(set([new_pack.name]), new_names)
1411
 
        self.assertEqual([to_remove_name],
1412
 
                         sorted([x[0][0] for x in deleted_nodes]))
1413
 
        packs.reload_pack_names()
1414
 
        reloaded_names = packs.names()
1415
 
        self.assertEqual(orig_at_load, packs._packs_at_load)
1416
 
        self.assertEqual(names, reloaded_names)
1417
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1418
 
        new_names = set([x[0][0] for x in new_nodes])
1419
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1420
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1421
 
        self.assertEqual(set([new_pack.name]), new_names)
1422
 
        self.assertEqual([to_remove_name],
1423
 
                         sorted([x[0][0] for x in deleted_nodes]))
1424
 
 
1425
 
    def test_autopack_obsoletes_new_pack(self):
1426
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1427
 
        packs._max_pack_count = lambda x: 1
1428
 
        packs.pack_distribution = lambda x: [10]
1429
 
        r.start_write_group()
1430
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1431
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1432
 
        # This should trigger an autopack, which will combine everything into a
1433
 
        # single pack file.
1434
 
        new_names = r.commit_write_group()
1435
 
        names = packs.names()
1436
 
        self.assertEqual(1, len(names))
1437
 
        self.assertEqual([names[0] + '.pack'],
1438
 
                         packs._pack_transport.list_dir('.'))
1439
 
 
1440
1258
    def test_autopack_reloads_and_stops(self):
1441
1259
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1442
1260
        # After we have determined what needs to be autopacked, trigger a
@@ -1454,38 +1272,6 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
-    def test__save_pack_names(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        packs._save_pack_names(obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # obsolete_packs will also have stuff like .rix and .iix present.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-    def test__save_pack_names_already_obsoleted(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        # We are going to simulate a concurrent autopack by manually obsoleting
-        # the pack directly.
-        packs._obsolete_packs([pack])
-        packs._save_pack_names(clear_obsolete_packs=True,
-                               obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # Note that while we set clear_obsolete_packs=True, it should not
-        # delete a pack file that we have also scheduled for obsoletion.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1555,7 +1341,6 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
-        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1614,7 +1399,6 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
-        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)