/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/repofmt/pack_repo.py

  • Committer: Jonathan Lange
  • Date: 2009-12-09 09:20:42 UTC
  • mfrom: (4881 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4907.
  • Revision ID: jml@canonical.com-20091209092042-s2zgqcf8f39yzxpj
Merge trunk.

Show diffs side-by-side

added

removed

Lines of Context:
54
54
    revision as _mod_revision,
55
55
    )
56
56
 
57
 
from bzrlib.decorators import needs_write_lock
 
57
from bzrlib.decorators import needs_write_lock, only_raises
58
58
from bzrlib.btree_index import (
59
59
    BTreeGraphIndex,
60
60
    BTreeBuilder,
73
73
    )
74
74
from bzrlib.trace import (
75
75
    mutter,
 
76
    note,
76
77
    warning,
77
78
    )
78
79
 
224
225
        return self.index_name('text', name)
225
226
 
226
227
    def _replace_index_with_readonly(self, index_type):
 
228
        unlimited_cache = False
 
229
        if index_type == 'chk':
 
230
            unlimited_cache = True
227
231
        setattr(self, index_type + '_index',
228
232
            self.index_class(self.index_transport,
229
233
                self.index_name(index_type, self.name),
230
 
                self.index_sizes[self.index_offset(index_type)]))
 
234
                self.index_sizes[self.index_offset(index_type)],
 
235
                unlimited_cache=unlimited_cache))
231
236
 
232
237
 
233
238
class ExistingPack(Pack):
422
427
        self._writer.begin()
423
428
        # what state is the pack in? (open, finished, aborted)
424
429
        self._state = 'open'
 
430
        # no name until we finish writing the content
 
431
        self.name = None
425
432
 
426
433
    def abort(self):
427
434
        """Cancel creating this pack."""
448
455
            self.signature_index.key_count() or
449
456
            (self.chk_index is not None and self.chk_index.key_count()))
450
457
 
 
458
    def finish_content(self):
 
459
        if self.name is not None:
 
460
            return
 
461
        self._writer.end()
 
462
        if self._buffer[1]:
 
463
            self._write_data('', flush=True)
 
464
        self.name = self._hash.hexdigest()
 
465
 
451
466
    def finish(self, suspend=False):
452
467
        """Finish the new pack.
453
468
 
459
474
         - stores the index size tuple for the pack in the index_sizes
460
475
           attribute.
461
476
        """
462
 
        self._writer.end()
463
 
        if self._buffer[1]:
464
 
            self._write_data('', flush=True)
465
 
        self.name = self._hash.hexdigest()
 
477
        self.finish_content()
466
478
        if not suspend:
467
479
            self._check_references()
468
480
        # write indices
1105
1117
            iterator is a tuple with:
1106
1118
            index, readv_vector, node_vector. readv_vector is a list ready to
1107
1119
            hand to the transport readv method, and node_vector is a list of
1108
 
            (key, eol_flag, references) for the the node retrieved by the
 
1120
            (key, eol_flag, references) for the node retrieved by the
1109
1121
            matching readv_vector.
1110
1122
        """
1111
1123
        # group by pack so we do one readv per pack
1567
1579
        # determine which packs need changing
1568
1580
        pack_operations = [[0, []]]
1569
1581
        for pack in self.all_packs():
1570
 
            if not hint or pack.name in hint:
 
1582
            if hint is None or pack.name in hint:
 
1583
                # Either no hint was provided (so we are packing everything),
 
1584
                # or this pack was included in the hint.
1571
1585
                pack_operations[-1][0] += pack.get_revision_count()
1572
1586
                pack_operations[-1][1].append(pack)
1573
1587
        self._execute_pack_operations(pack_operations, OptimisingPacker)
1665
1679
            txt_index = self._make_index(name, '.tix')
1666
1680
            sig_index = self._make_index(name, '.six')
1667
1681
            if self.chk_index is not None:
1668
 
                chk_index = self._make_index(name, '.cix')
 
1682
                chk_index = self._make_index(name, '.cix', unlimited_cache=True)
1669
1683
            else:
1670
1684
                chk_index = None
1671
1685
            result = ExistingPack(self._pack_transport, name, rev_index,
1690
1704
            txt_index = self._make_index(name, '.tix', resume=True)
1691
1705
            sig_index = self._make_index(name, '.six', resume=True)
1692
1706
            if self.chk_index is not None:
1693
 
                chk_index = self._make_index(name, '.cix', resume=True)
 
1707
                chk_index = self._make_index(name, '.cix', resume=True,
 
1708
                                             unlimited_cache=True)
1694
1709
            else:
1695
1710
                chk_index = None
1696
1711
            result = self.resumed_pack_factory(name, rev_index, inv_index,
1726
1741
        return self._index_class(self.transport, 'pack-names', None
1727
1742
                ).iter_all_entries()
1728
1743
 
1729
 
    def _make_index(self, name, suffix, resume=False):
 
1744
    def _make_index(self, name, suffix, resume=False, unlimited_cache=False):
1730
1745
        size_offset = self._suffix_offsets[suffix]
1731
1746
        index_name = name + suffix
1732
1747
        if resume:
1735
1750
        else:
1736
1751
            transport = self._index_transport
1737
1752
            index_size = self._names[name][size_offset]
1738
 
        return self._index_class(transport, index_name, index_size)
 
1753
        return self._index_class(transport, index_name, index_size,
 
1754
                                 unlimited_cache=unlimited_cache)
1739
1755
 
1740
1756
    def _max_pack_count(self, total_revisions):
1741
1757
        """Return the maximum number of packs to use for total revisions.
2054
2070
            self._remove_pack_indices(resumed_pack)
2055
2071
        del self._resumed_packs[:]
2056
2072
 
 
2073
    def _check_new_inventories(self):
 
2074
        """Detect missing inventories in this write group.
 
2075
 
 
2076
        :returns: list of strs, summarising any problems found.  If the list is
 
2077
            empty no problems were found.
 
2078
        """
 
2079
        # The base implementation does no checks.  GCRepositoryPackCollection
 
2080
        # overrides this.
 
2081
        return []
 
2082
        
2057
2083
    def _commit_write_group(self):
2058
2084
        all_missing = set()
2059
2085
        for prefix, versioned_file in (
2068
2094
            raise errors.BzrCheckError(
2069
2095
                "Repository %s has missing compression parent(s) %r "
2070
2096
                 % (self.repo, sorted(all_missing)))
 
2097
        problems = self._check_new_inventories()
 
2098
        if problems:
 
2099
            problems_summary = '\n'.join(problems)
 
2100
            raise errors.BzrCheckError(
 
2101
                "Cannot add revision(s) to repository: " + problems_summary)
2071
2102
        self._remove_pack_indices(self._new_pack)
2072
 
        should_autopack = False
 
2103
        any_new_content = False
2073
2104
        if self._new_pack.data_inserted():
2074
2105
            # get all the data to disk and read to use
2075
2106
            self._new_pack.finish()
2076
2107
            self.allocate(self._new_pack)
2077
2108
            self._new_pack = None
2078
 
            should_autopack = True
 
2109
            any_new_content = True
2079
2110
        else:
2080
2111
            self._new_pack.abort()
2081
2112
            self._new_pack = None
2086
2117
            self._remove_pack_from_memory(resumed_pack)
2087
2118
            resumed_pack.finish()
2088
2119
            self.allocate(resumed_pack)
2089
 
            should_autopack = True
 
2120
            any_new_content = True
2090
2121
        del self._resumed_packs[:]
2091
 
        if should_autopack:
2092
 
            if not self.autopack():
 
2122
        if any_new_content:
 
2123
            result = self.autopack()
 
2124
            if not result:
2093
2125
                # when autopack takes no steps, the names list is still
2094
2126
                # unsaved.
2095
2127
                return self._save_pack_names()
 
2128
            return result
 
2129
        return []
2096
2130
 
2097
2131
    def _suspend_write_group(self):
2098
2132
        tokens = [pack.name for pack in self._resumed_packs]
2212
2246
                    % (self._format, self.bzrdir.transport.base))
2213
2247
 
2214
2248
    def _abort_write_group(self):
2215
 
        self.revisions._index._key_dependencies.refs.clear()
 
2249
        self.revisions._index._key_dependencies.clear()
2216
2250
        self._pack_collection._abort_write_group()
2217
2251
 
2218
 
    def _find_inconsistent_revision_parents(self):
2219
 
        """Find revisions with incorrectly cached parents.
2220
 
 
2221
 
        :returns: an iterator yielding tuples of (revison-id, parents-in-index,
2222
 
            parents-in-revision).
2223
 
        """
2224
 
        if not self.is_locked():
2225
 
            raise errors.ObjectNotLocked(self)
2226
 
        pb = ui.ui_factory.nested_progress_bar()
2227
 
        result = []
2228
 
        try:
2229
 
            revision_nodes = self._pack_collection.revision_index \
2230
 
                .combined_index.iter_all_entries()
2231
 
            index_positions = []
2232
 
            # Get the cached index values for all revisions, and also the
2233
 
            # location in each index of the revision text so we can perform
2234
 
            # linear IO.
2235
 
            for index, key, value, refs in revision_nodes:
2236
 
                node = (index, key, value, refs)
2237
 
                index_memo = self.revisions._index._node_to_position(node)
2238
 
                if index_memo[0] != index:
2239
 
                    raise AssertionError('%r != %r' % (index_memo[0], index))
2240
 
                index_positions.append((index_memo, key[0],
2241
 
                                       tuple(parent[0] for parent in refs[0])))
2242
 
                pb.update("Reading revision index", 0, 0)
2243
 
            index_positions.sort()
2244
 
            batch_size = 1000
2245
 
            pb.update("Checking cached revision graph", 0,
2246
 
                      len(index_positions))
2247
 
            for offset in xrange(0, len(index_positions), 1000):
2248
 
                pb.update("Checking cached revision graph", offset)
2249
 
                to_query = index_positions[offset:offset + batch_size]
2250
 
                if not to_query:
2251
 
                    break
2252
 
                rev_ids = [item[1] for item in to_query]
2253
 
                revs = self.get_revisions(rev_ids)
2254
 
                for revision, item in zip(revs, to_query):
2255
 
                    index_parents = item[2]
2256
 
                    rev_parents = tuple(revision.parent_ids)
2257
 
                    if index_parents != rev_parents:
2258
 
                        result.append((revision.revision_id, index_parents,
2259
 
                                       rev_parents))
2260
 
        finally:
2261
 
            pb.finished()
2262
 
        return result
2263
 
 
2264
2252
    def _get_source(self, to_format):
2265
2253
        if to_format.network_name() == self._format.network_name():
2266
2254
            return KnitPackStreamSource(self, to_format)
2278
2266
        self._pack_collection._start_write_group()
2279
2267
 
2280
2268
    def _commit_write_group(self):
2281
 
        self.revisions._index._key_dependencies.refs.clear()
2282
 
        return self._pack_collection._commit_write_group()
 
2269
        hint = self._pack_collection._commit_write_group()
 
2270
        self.revisions._index._key_dependencies.clear()
 
2271
        return hint
2283
2272
 
2284
2273
    def suspend_write_group(self):
2285
2274
        # XXX check self._write_group is self.get_transaction()?
2286
2275
        tokens = self._pack_collection._suspend_write_group()
2287
 
        self.revisions._index._key_dependencies.refs.clear()
 
2276
        self.revisions._index._key_dependencies.clear()
2288
2277
        self._write_group = None
2289
2278
        return tokens
2290
2279
 
2318
2307
        if self._write_lock_count == 1:
2319
2308
            self._transaction = transactions.WriteTransaction()
2320
2309
        if not locked:
 
2310
            if 'relock' in debug.debug_flags and self._prev_lock == 'w':
 
2311
                note('%r was write locked again', self)
 
2312
            self._prev_lock = 'w'
2321
2313
            for repo in self._fallback_repositories:
2322
2314
                # Writes don't affect fallback repos
2323
2315
                repo.lock_read()
2330
2322
        else:
2331
2323
            self.control_files.lock_read()
2332
2324
        if not locked:
 
2325
            if 'relock' in debug.debug_flags and self._prev_lock == 'r':
 
2326
                note('%r was read locked again', self)
 
2327
            self._prev_lock = 'r'
2333
2328
            for repo in self._fallback_repositories:
2334
2329
                repo.lock_read()
2335
2330
            self._refresh_data()
2363
2358
        packer = ReconcilePacker(collection, packs, extension, revs)
2364
2359
        return packer.pack(pb)
2365
2360
 
 
2361
    @only_raises(errors.LockNotHeld, errors.LockBroken)
2366
2362
    def unlock(self):
2367
2363
        if self._write_lock_count == 1 and self._write_group is not None:
2368
2364
            self.abort_write_group()
2565
2561
        """See RepositoryFormat.get_format_description()."""
2566
2562
        return "Packs containing knits without subtree support"
2567
2563
 
2568
 
    def check_conversion_target(self, target_format):
2569
 
        pass
2570
 
 
2571
2564
 
2572
2565
class RepositoryFormatKnitPack3(RepositoryFormatPack):
2573
2566
    """A subtrees parameterized Pack repository.
2599
2592
 
2600
2593
    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
2601
2594
 
2602
 
    def check_conversion_target(self, target_format):
2603
 
        if not target_format.rich_root_data:
2604
 
            raise errors.BadConversionTarget(
2605
 
                'Does not support rich root data.', target_format)
2606
 
        if not getattr(target_format, 'supports_tree_reference', False):
2607
 
            raise errors.BadConversionTarget(
2608
 
                'Does not support nested trees', target_format)
2609
 
 
2610
2595
    def get_format_string(self):
2611
2596
        """See RepositoryFormat.get_format_string()."""
2612
2597
        return "Bazaar pack repository format 1 with subtree support (needs bzr 0.92)\n"
2645
2630
 
2646
2631
    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
2647
2632
 
2648
 
    def check_conversion_target(self, target_format):
2649
 
        if not target_format.rich_root_data:
2650
 
            raise errors.BadConversionTarget(
2651
 
                'Does not support rich root data.', target_format)
2652
 
 
2653
2633
    def get_format_string(self):
2654
2634
        """See RepositoryFormat.get_format_string()."""
2655
2635
        return ("Bazaar pack repository format 1 with rich root"
2696
2676
        """See RepositoryFormat.get_format_description()."""
2697
2677
        return "Packs 5 (adds stacking support, requires bzr 1.6)"
2698
2678
 
2699
 
    def check_conversion_target(self, target_format):
2700
 
        pass
2701
 
 
2702
2679
 
2703
2680
class RepositoryFormatKnitPack5RichRoot(RepositoryFormatPack):
2704
2681
    """A repository with rich roots and stacking.
2731
2708
 
2732
2709
    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
2733
2710
 
2734
 
    def check_conversion_target(self, target_format):
2735
 
        if not target_format.rich_root_data:
2736
 
            raise errors.BadConversionTarget(
2737
 
                'Does not support rich root data.', target_format)
2738
 
 
2739
2711
    def get_format_string(self):
2740
2712
        """See RepositoryFormat.get_format_string()."""
2741
2713
        return "Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6.1)\n"
2782
2754
 
2783
2755
    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
2784
2756
 
2785
 
    def check_conversion_target(self, target_format):
2786
 
        if not target_format.rich_root_data:
2787
 
            raise errors.BadConversionTarget(
2788
 
                'Does not support rich root data.', target_format)
2789
 
 
2790
2757
    def get_format_string(self):
2791
2758
        """See RepositoryFormat.get_format_string()."""
2792
2759
        return "Bazaar RepositoryFormatKnitPack5RichRoot (bzr 1.6)\n"
2830
2797
        """See RepositoryFormat.get_format_description()."""
2831
2798
        return "Packs 6 (uses btree indexes, requires bzr 1.9)"
2832
2799
 
2833
 
    def check_conversion_target(self, target_format):
2834
 
        pass
2835
 
 
2836
2800
 
2837
2801
class RepositoryFormatKnitPack6RichRoot(RepositoryFormatPack):
2838
2802
    """A repository with rich roots, no subtrees, stacking and btree indexes.
2862
2826
 
2863
2827
    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
2864
2828
 
2865
 
    def check_conversion_target(self, target_format):
2866
 
        if not target_format.rich_root_data:
2867
 
            raise errors.BadConversionTarget(
2868
 
                'Does not support rich root data.', target_format)
2869
 
 
2870
2829
    def get_format_string(self):
2871
2830
        """See RepositoryFormat.get_format_string()."""
2872
2831
        return "Bazaar RepositoryFormatKnitPack6RichRoot (bzr 1.9)\n"
2908
2867
 
2909
2868
    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
2910
2869
 
2911
 
    def check_conversion_target(self, target_format):
2912
 
        if not target_format.rich_root_data:
2913
 
            raise errors.BadConversionTarget(
2914
 
                'Does not support rich root data.', target_format)
2915
 
        if not getattr(target_format, 'supports_tree_reference', False):
2916
 
            raise errors.BadConversionTarget(
2917
 
                'Does not support nested trees', target_format)
2918
 
 
2919
2870
    def get_format_string(self):
2920
2871
        """See RepositoryFormat.get_format_string()."""
2921
2872
        return ("Bazaar development format 2 with subtree support "