/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_versionedfile.py

  • Committer: John Arbash Meinel
  • Date: 2009-07-31 17:42:29 UTC
  • mto: This revision was merged to the branch mainline in revision 4611.
  • Revision ID: john@arbash-meinel.com-20090731174229-w2zdsdlfpeddk8gl
Now we've got to the per-workingtree tests, etc.

The main causes seem to break down into:
  bzrdir.clone() is known to be broken wrt locking; this affects
  everything that tries to 'push'

  shelf code is not compatible with strict locking

  merge code seems to have an issue. This might actually be the
  root cause of the clone() problems.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2010 Canonical Ltd
 
1
# Copyright (C) 2005, 2009 Canonical Ltd
2
2
#
3
3
# Authors:
4
4
#   Johan Rydberg <jrydberg@gnu.org>
26
26
 
27
27
from bzrlib import (
28
28
    errors,
29
 
    graph as _mod_graph,
30
29
    groupcompress,
31
30
    knit as _mod_knit,
32
31
    osutils,
33
32
    progress,
34
 
    ui,
35
33
    )
36
34
from bzrlib.errors import (
37
35
                           RevisionNotPresent,
734
732
        # the ordering here is to make a tree so that dumb searches have
735
733
        # more changes to muck up.
736
734
 
737
 
        class InstrumentedProgress(progress.ProgressTask):
 
735
        class InstrumentedProgress(progress.DummyProgress):
738
736
 
739
737
            def __init__(self):
740
 
                progress.ProgressTask.__init__(self)
 
738
 
 
739
                progress.DummyProgress.__init__(self)
741
740
                self.updates = []
742
741
 
743
742
            def update(self, msg=None, current=None, total=None):
1469
1468
            transport.mkdir('.')
1470
1469
        files = self.factory(transport)
1471
1470
        if self.cleanup is not None:
1472
 
            self.addCleanup(self.cleanup, files)
 
1471
            self.addCleanup(lambda:self.cleanup(files))
1473
1472
        return files
1474
1473
 
1475
1474
    def get_simple_key(self, suffix):
1558
1557
        self.assertRaises(RevisionNotPresent,
1559
1558
            files.annotate, prefix + ('missing-key',))
1560
1559
 
1561
 
    def test_check_no_parameters(self):
1562
 
        files = self.get_versionedfiles()
1563
 
 
1564
 
    def test_check_progressbar_parameter(self):
1565
 
        """A progress bar can be supplied because check can be a generator."""
1566
 
        pb = ui.ui_factory.nested_progress_bar()
1567
 
        self.addCleanup(pb.finished)
1568
 
        files = self.get_versionedfiles()
1569
 
        files.check(progress_bar=pb)
1570
 
 
1571
 
    def test_check_with_keys_becomes_generator(self):
 
1560
    def test_get_annotator(self):
1572
1561
        files = self.get_versionedfiles()
1573
1562
        self.get_diamond_files(files)
1574
 
        keys = files.keys()
1575
 
        entries = files.check(keys=keys)
1576
 
        seen = set()
1577
 
        # Texts output should be fulltexts.
1578
 
        self.capture_stream(files, entries, seen.add,
1579
 
            files.get_parent_map(keys), require_fulltext=True)
1580
 
        # All texts should be output.
1581
 
        self.assertEqual(set(keys), seen)
1582
 
 
1583
 
    def test_clear_cache(self):
1584
 
        files = self.get_versionedfiles()
1585
 
        files.clear_cache()
 
1563
        origin_key = self.get_simple_key('origin')
 
1564
        base_key = self.get_simple_key('base')
 
1565
        left_key = self.get_simple_key('left')
 
1566
        right_key = self.get_simple_key('right')
 
1567
        merged_key = self.get_simple_key('merged')
 
1568
        # annotator = files.get_annotator()
 
1569
        # introduced full text
 
1570
        origins, lines = files.get_annotator().annotate(origin_key)
 
1571
        self.assertEqual([(origin_key,)], origins)
 
1572
        self.assertEqual(['origin\n'], lines)
 
1573
        # a delta
 
1574
        origins, lines = files.get_annotator().annotate(base_key)
 
1575
        self.assertEqual([(base_key,)], origins)
 
1576
        # a merge
 
1577
        origins, lines = files.get_annotator().annotate(merged_key)
 
1578
        if self.graph:
 
1579
            self.assertEqual([
 
1580
                (base_key,),
 
1581
                (left_key,),
 
1582
                (right_key,),
 
1583
                (merged_key,),
 
1584
                ], origins)
 
1585
        else:
 
1586
            # Without a graph everything is new.
 
1587
            self.assertEqual([
 
1588
                (merged_key,),
 
1589
                (merged_key,),
 
1590
                (merged_key,),
 
1591
                (merged_key,),
 
1592
                ], origins)
 
1593
        self.assertRaises(RevisionNotPresent,
 
1594
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
1586
1595
 
1587
1596
    def test_construct(self):
1588
1597
        """Each parameterised test can be constructed on a transport."""
1741
1750
            f.get_record_stream([key_b], 'unordered', True
1742
1751
                ).next().get_bytes_as('fulltext'))
1743
1752
 
1744
 
    def test_get_known_graph_ancestry(self):
1745
 
        f = self.get_versionedfiles()
1746
 
        if not self.graph:
1747
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1748
 
        key_a = self.get_simple_key('a')
1749
 
        key_b = self.get_simple_key('b')
1750
 
        key_c = self.get_simple_key('c')
1751
 
        # A
1752
 
        # |\
1753
 
        # | B
1754
 
        # |/
1755
 
        # C
1756
 
        f.add_lines(key_a, [], ['\n'])
1757
 
        f.add_lines(key_b, [key_a], ['\n'])
1758
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1759
 
        kg = f.get_known_graph_ancestry([key_c])
1760
 
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
1761
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1762
 
 
1763
 
    def test_known_graph_with_fallbacks(self):
1764
 
        f = self.get_versionedfiles('files')
1765
 
        if not self.graph:
1766
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
1767
 
        if getattr(f, 'add_fallback_versioned_files', None) is None:
1768
 
            raise TestNotApplicable("%s doesn't support fallbacks"
1769
 
                                    % (f.__class__.__name__,))
1770
 
        key_a = self.get_simple_key('a')
1771
 
        key_b = self.get_simple_key('b')
1772
 
        key_c = self.get_simple_key('c')
1773
 
        # A     only in fallback
1774
 
        # |\
1775
 
        # | B
1776
 
        # |/
1777
 
        # C
1778
 
        g = self.get_versionedfiles('fallback')
1779
 
        g.add_lines(key_a, [], ['\n'])
1780
 
        f.add_fallback_versioned_files(g)
1781
 
        f.add_lines(key_b, [key_a], ['\n'])
1782
 
        f.add_lines(key_c, [key_a, key_b], ['\n'])
1783
 
        kg = f.get_known_graph_ancestry([key_c])
1784
 
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1785
 
 
1786
1753
    def test_get_record_stream_empty(self):
1787
1754
        """An empty stream can be requested without error."""
1788
1755
        f = self.get_versionedfiles()
1799
1766
             'knit-delta-closure', 'knit-delta-closure-ref',
1800
1767
             'groupcompress-block', 'groupcompress-block-ref'])
1801
1768
 
1802
 
    def capture_stream(self, f, entries, on_seen, parents,
1803
 
        require_fulltext=False):
 
1769
    def capture_stream(self, f, entries, on_seen, parents):
1804
1770
        """Capture a stream for testing."""
1805
1771
        for factory in entries:
1806
1772
            on_seen(factory.key)
1811
1777
            self.assertEqual(parents[factory.key], factory.parents)
1812
1778
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1813
1779
                str)
1814
 
            if require_fulltext:
1815
 
                factory.get_bytes_as('fulltext')
1816
1780
 
1817
1781
    def test_get_record_stream_interface(self):
1818
1782
        """each item in a stream has to provide a regular interface."""
2193
2157
        else:
2194
2158
            return None
2195
2159
 
2196
 
    def test_get_annotator(self):
2197
 
        files = self.get_versionedfiles()
2198
 
        self.get_diamond_files(files)
2199
 
        origin_key = self.get_simple_key('origin')
2200
 
        base_key = self.get_simple_key('base')
2201
 
        left_key = self.get_simple_key('left')
2202
 
        right_key = self.get_simple_key('right')
2203
 
        merged_key = self.get_simple_key('merged')
2204
 
        # annotator = files.get_annotator()
2205
 
        # introduced full text
2206
 
        origins, lines = files.get_annotator().annotate(origin_key)
2207
 
        self.assertEqual([(origin_key,)], origins)
2208
 
        self.assertEqual(['origin\n'], lines)
2209
 
        # a delta
2210
 
        origins, lines = files.get_annotator().annotate(base_key)
2211
 
        self.assertEqual([(base_key,)], origins)
2212
 
        # a merge
2213
 
        origins, lines = files.get_annotator().annotate(merged_key)
2214
 
        if self.graph:
2215
 
            self.assertEqual([
2216
 
                (base_key,),
2217
 
                (left_key,),
2218
 
                (right_key,),
2219
 
                (merged_key,),
2220
 
                ], origins)
2221
 
        else:
2222
 
            # Without a graph everything is new.
2223
 
            self.assertEqual([
2224
 
                (merged_key,),
2225
 
                (merged_key,),
2226
 
                (merged_key,),
2227
 
                (merged_key,),
2228
 
                ], origins)
2229
 
        self.assertRaises(RevisionNotPresent,
2230
 
            files.get_annotator().annotate, self.get_simple_key('missing-key'))
2231
 
 
2232
2160
    def test_get_parent_map(self):
2233
2161
        files = self.get_versionedfiles()
2234
2162
        if self.key_length == 1:
2437
2365
        else:
2438
2366
            self.assertIdenticalVersionedFile(source, files)
2439
2367
 
2440
 
    def test_insert_record_stream_long_parent_chain_out_of_order(self):
2441
 
        """An out of order stream can either error or work."""
2442
 
        if not self.graph:
2443
 
            raise TestNotApplicable('ancestry info only relevant with graph.')
2444
 
        # Create a reasonably long chain of records based on each other, where
2445
 
        # most will be deltas.
2446
 
        source = self.get_versionedfiles('source')
2447
 
        parents = ()
2448
 
        keys = []
2449
 
        content = [('same same %d\n' % n) for n in range(500)]
2450
 
        for letter in 'abcdefghijklmnopqrstuvwxyz':
2451
 
            key = ('key-' + letter,)
2452
 
            if self.key_length == 2:
2453
 
                key = ('prefix',) + key
2454
 
            content.append('content for ' + letter + '\n')
2455
 
            source.add_lines(key, parents, content)
2456
 
            keys.append(key)
2457
 
            parents = (key,)
2458
 
        # Create a stream of these records, excluding the first record that the
2459
 
        # rest ultimately depend upon, and insert it into a new vf.
2460
 
        streams = []
2461
 
        for key in reversed(keys):
2462
 
            streams.append(source.get_record_stream([key], 'unordered', False))
2463
 
        deltas = chain(*streams[:-1])
2464
 
        files = self.get_versionedfiles()
2465
 
        try:
2466
 
            files.insert_record_stream(deltas)
2467
 
        except RevisionNotPresent:
2468
 
            # Must not have corrupted the file.
2469
 
            files.check()
2470
 
        else:
2471
 
            # Must only report either just the first key as a missing parent,
2472
 
            # no key as missing (for nodelta scenarios).
2473
 
            missing = set(files.get_missing_compression_parent_keys())
2474
 
            missing.discard(keys[0])
2475
 
            self.assertEqual(set(), missing)
2476
 
 
2477
2368
    def get_knit_delta_source(self):
2478
2369
        """Get a source that can produce a stream with knit delta records,
2479
2370
        regardless of this test's scenario.
2547
2438
        # the ordering here is to make a tree so that dumb searches have
2548
2439
        # more changes to muck up.
2549
2440
 
2550
 
        class InstrumentedProgress(progress.ProgressTask):
 
2441
        class InstrumentedProgress(progress.DummyProgress):
2551
2442
 
2552
2443
            def __init__(self):
2553
 
                progress.ProgressTask.__init__(self)
 
2444
 
 
2445
                progress.DummyProgress.__init__(self)
2554
2446
                self.updates = []
2555
2447
 
2556
2448
            def update(self, msg=None, current=None, total=None):
2745
2637
        self.assertRaises(NotImplementedError,
2746
2638
                self.texts.add_mpdiffs, [])
2747
2639
 
2748
 
    def test_check_noerrors(self):
2749
 
        self.texts.check()
 
2640
    def test_check(self):
 
2641
        self.assertTrue(self.texts.check())
2750
2642
 
2751
2643
    def test_insert_record_stream(self):
2752
2644
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,