/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to bzrlib/repofmt/groupcompress_repo.py

  • Committer: Richard Wilbur
  • Date: 2016-02-04 19:07:28 UTC
  • mto: This revision was merged to the branch mainline in revision 6618.
  • Revision ID: richard.wilbur@gmail.com-20160204190728-p0zvfii6zase0fw7
Update COPYING.txt from the original http://www.gnu.org/licenses/gpl-2.0.txt  (Only differences were in whitespace.)  Thanks to Petr Stodulka for pointing out the discrepancy.

Each row of the diff below gives the line's number in each of the two versions of the file, followed by the line itself; a row with a number in only one column is present in only that version of the file.

  20 |   20 | 
  21 |   21 | import time
  22 |   22 | 
  23 |      | from .. import (
     |   23 | from bzrlib import (
  24 |   24 |     controldir,
     |   25 |     chk_map,
     |   26 |     chk_serializer,
  25 |   27 |     debug,
  26 |   28 |     errors,
     |   29 |     index as _mod_index,
     |   30 |     inventory,
  27 |   31 |     osutils,
     |   32 |     pack,
  28 |   33 |     revision as _mod_revision,
  29 |   34 |     trace,
  30 |   35 |     ui,
  31 |      |     )
  32 |      | from ..bzr import (
  33 |      |     chk_map,
  34 |      |     chk_serializer,
  35 |      |     index as _mod_index,
  36 |      |     inventory,
  37 |      |     pack,
  38 |   36 |     versionedfile,
  39 |   37 |     )
  40 |      | from ..bzr.btree_index import (
     |   38 | from bzrlib.btree_index import (
  41 |   39 |     BTreeGraphIndex,
  42 |   40 |     BTreeBuilder,
  43 |   41 |     )
  44 |      | from ..bzr.groupcompress import (
     |   42 | from bzrlib.decorators import needs_write_lock
     |   43 | from bzrlib.groupcompress import (
  45 |   44 |     _GCGraphIndex,
  46 |   45 |     GroupCompressVersionedFiles,
  47 |   46 |     )
  48 |      | from .pack_repo import (
     |   47 | from bzrlib.repofmt.pack_repo import (
  49 |   48 |     _DirectPackAccess,
  50 |   49 |     Pack,
  51 |   50 |     NewPack,

  56 |   55 |     ResumedPack,
  57 |   56 |     Packer,
  58 |   57 |     )
  59 |      | from ..bzr.vf_repository import (
     |   58 | from bzrlib.vf_repository import (
  60 |   59 |     StreamSource,
  61 |   60 |     )
  62 |      | from ..sixish import (
  63 |      |     viewitems,
  64 |      |     viewvalues,
  65 |      |     )
  66 |      | from ..static_tuple import StaticTuple
     |   61 | from bzrlib.static_tuple import StaticTuple
  67 |   62 | 
  68 |   63 | 
  69 |   64 | class GCPack(NewPack):

 148 |  143 |         # robertc says- this is a closure rather than a method on the object
 149 |  144 |         # so that the variables are locals, and faster than accessing object
 150 |  145 |         # members.
 151 |      |         def _write_data(data, flush=False, _buffer=self._buffer,
     |  146 |         def _write_data(bytes, flush=False, _buffer=self._buffer,
 152 |  147 |             _write=self.write_stream.write, _update=self._hash.update):
 153 |      |             _buffer[0].append(data)
 154 |      |             _buffer[1] += len(data)
     |  148 |             _buffer[0].append(bytes)
     |  149 |             _buffer[1] += len(bytes)
 155 |  150 |             # buffer cap
 156 |  151 |             if _buffer[1] > self._cache_limit or flush:
 157 |      |                 data = b''.join(_buffer[0])
 158 |      |                 _write(data)
 159 |      |                 _update(data)
     |  152 |                 bytes = ''.join(_buffer[0])
     |  153 |                 _write(bytes)
     |  154 |                 _update(bytes)
 160 |  155 |                 _buffer[:] = [[], 0]
 161 |  156 |         # expose this on self, for the occasion when clients want to add data.
 162 |  157 |         self._write_data = _write_data

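The _write_data hunk renames the buffered chunks from bytes to data and joins them with b'' rather than '': once the pack stream is handled as bytes, every chunk in the buffer is a bytes object, so the join separator has to be bytes too. A minimal, self-contained sketch of the same buffer-and-flush pattern, using a hypothetical in-memory stream and checksum in place of the pack's write stream and hash:

    # Illustrative sketch only; `stream`, `digest` and the flush limit are
    # stand-ins, not values taken from the repository code.
    import hashlib
    import io

    stream = io.BytesIO()
    digest = hashlib.sha1()
    _buffer = [[], 0]               # [pending chunks, total pending length]
    _cache_limit = 4 * 1024 * 1024  # flush threshold

    def write_data(data, flush=False):
        _buffer[0].append(data)
        _buffer[1] += len(data)
        if _buffer[1] > _cache_limit or flush:
            # All chunks are bytes, so the separator must be b'' on Python 3.
            joined = b''.join(_buffer[0])
            stream.write(joined)
            digest.update(joined)
            _buffer[:] = [[], 0]
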
 281 |  276 |                 remaining_keys.difference_update(cur_keys)
 282 |  277 |                 next_keys = set()
 283 |  278 |                 def handle_internal_node(node):
 284 |      |                     for prefix, value in viewitems(node._items):
     |  279 |                     for prefix, value in node._items.iteritems():
 285 |  280 |                         # We don't want to request the same key twice, and we
 286 |  281 |                         # want to order it by the first time it is seen.
 287 |  282 |                         # Even further, we don't want to request a key which is

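This hunk, and the ancestor_keys, search_key_registry and inv_parent_map hunks further down, replace dict.iteritems()/itervalues() with the viewitems()/viewvalues() helpers imported from ..sixish in the import block above. A rough sketch of what such Python 2/3 dict-view helpers typically look like (an illustration of the pattern, not the actual contents of breezy's sixish module):

    import operator
    import sys

    if sys.version_info[0] >= 3:
        # Python 3: items()/values() already return view objects.
        viewitems = operator.methodcaller('items')
        viewvalues = operator.methodcaller('values')
    else:
        # Python 2: the view-returning methods are viewitems()/viewvalues().
        viewitems = operator.methodcaller('viewitems')
        viewvalues = operator.methodcaller('viewvalues')

    # Call sites then read the same on both Python versions, e.g.
    #     for prefix, value in viewitems(node._items):
    #         ...
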
 402 |  397 |                      pb_offset):
 403 |  398 |         trace.mutter('repacking %d %s', len(keys), message)
 404 |  399 |         self.pb.update('repacking %s' % (message,), pb_offset)
 405 |      |         with ui.ui_factory.nested_progress_bar() as child_pb:
     |  400 |         child_pb = ui.ui_factory.nested_progress_bar()
     |  401 |         try:
 406 |  402 |             stream = vf_to_stream(source_vf, keys, message, child_pb)
 407 |  403 |             for _ in target_vf._insert_record_stream(stream,
 408 |  404 |                                                      random_id=True,
 409 |  405 |                                                      reuse_blocks=False):
 410 |  406 |                 pass
     |  407 |         finally:
     |  408 |             child_pb.finished()
 411 |  409 | 
 412 |  410 |     def _copy_revision_texts(self):
 413 |  411 |         source_vf, target_vf = self._build_vfs('revision', True, False)

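Here, and in several hunks below, an explicit try/finally around nested_progress_bar() becomes a with statement. That rewrite assumes the progress-bar object is a context manager whose __exit__ calls finished(); a minimal sketch of such an object (illustrative, not breezy's actual ui classes):

    class NestedProgressBar(object):
        def update(self, msg, current=None, total=None):
            pass  # draw or refresh the bar

        def finished(self):
            pass  # clear the bar and release the nesting slot

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            self.finished()
            return False  # never swallow exceptions

With that in place, `with ui.ui_factory.nested_progress_bar() as child_pb:` cleans up the bar on every exit path, exactly like the try/finally form on the other side of the hunk.
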
 455 |  453 |                      len(self._chk_id_roots), len(self._chk_p_id_roots),
 456 |  454 |                      len(total_keys))
 457 |  455 |         self.pb.update('repacking chk', 3)
 458 |      |         with ui.ui_factory.nested_progress_bar() as child_pb:
     |  456 |         child_pb = ui.ui_factory.nested_progress_bar()
     |  457 |         try:
 459 |  458 |             for stream in self._get_chk_streams(source_vf, total_keys,
 460 |  459 |                                                 pb=child_pb):
 461 |  460 |                 for _ in target_vf._insert_record_stream(stream,
 462 |  461 |                                                          random_id=True,
 463 |  462 |                                                          reuse_blocks=False):
 464 |  463 |                     pass
     |  464 |         finally:
     |  465 |             child_pb.finished()
 465 |  466 | 
 466 |  467 |     def _copy_text_texts(self):
 467 |  468 |         source_vf, target_vf = self._build_vfs('text', True, True)

 513 |  514 | class GCCHKReconcilePacker(GCCHKPacker):
 514 |  515 |     """A packer which regenerates indices etc as it copies.
 515 |  516 | 
 516 |      |     This is used by ``brz reconcile`` to cause parent text pointers to be
     |  517 |     This is used by ``bzr reconcile`` to cause parent text pointers to be
 517 |  518 |     regenerated.
 518 |  519 |     """
 519 |  520 | 

 542 |  543 |         ancestor_keys = revision_vf.get_parent_map(revision_vf.keys())
 543 |  544 |         # Strip keys back into revision_ids.
 544 |  545 |         ancestors = dict((k[0], tuple([p[0] for p in parents]))
 545 |      |                          for k, parents in viewitems(ancestor_keys))
     |  546 |                          for k, parents in ancestor_keys.iteritems())
 546 |  547 |         del ancestor_keys
 547 |  548 |         # TODO: _generate_text_key_index should be much cheaper to generate from
 548 |  549 |         #       a chk repository, rather than the current implementation

 613 |  614 |         This is useful to get the side-effects of generating a stream.
 614 |  615 |         """
 615 |  616 |         self.pb.update('scanning %s' % (message,), pb_offset)
 616 |      |         with ui.ui_factory.nested_progress_bar() as child_pb:
     |  617 |         child_pb = ui.ui_factory.nested_progress_bar()
     |  618 |         try:
 617 |  619 |             list(vf_to_stream(source_vf, keys, message, child_pb))
     |  620 |         finally:
     |  621 |             child_pb.finished()
 618 |  622 | 
 619 |  623 |     def _copy_inventory_texts(self):
 620 |  624 |         source_vf, target_vf = self._build_vfs('inventory', True, True)

 661 |  665 |                 if search_key_name is None:
 662 |  666 |                     # Find the name corresponding to the search_key_func
 663 |  667 |                     search_key_reg = chk_map.search_key_registry
 664 |      |                     for search_key_name, func in viewitems(search_key_reg):
     |  668 |                     for search_key_name, func in search_key_reg.iteritems():
 665 |  669 |                         if func == chk_inv.id_to_entry._search_key_func:
 666 |  670 |                             break
 667 |  671 |                 canonical_inv = inventory.CHKInventory.from_inventory(

 737 |  741 |         # any present parent inventories, which may be used when calculating
 738 |  742 |         # deltas for streaming.
 739 |  743 |         all_inv_keys = set(corresponding_invs)
 740 |      |         for parent_inv_keys in viewvalues(inv_parent_map):
     |  744 |         for parent_inv_keys in inv_parent_map.itervalues():
 741 |  745 |             all_inv_keys.update(parent_inv_keys)
 742 |  746 |         # Filter out ghost parents.
 743 |  747 |         all_inv_keys.intersection_update(

 756 |  760 |         if missing_chk_roots:
 757 |  761 |             problems.append(
 758 |  762 |                 "missing referenced chk root keys: %s."
 759 |      |                 "Run 'brz reconcile --canonicalize-chks' on the affected "
     |  763 |                 "Run 'bzr reconcile --canonicalize-chks' on the affected "
 760 |  764 |                 "repository."
 761 |  765 |                 % (sorted(missing_chk_roots),))
 762 |  766 |             # Don't bother checking any further.

 774 |  778 |             for record in _filter_text_keys(chk_diff, text_keys,
 775 |  779 |                                             chk_map._bytes_to_text_key):
 776 |  780 |                 pass
 777 |      |         except errors.NoSuchRevision as e:
     |  781 |         except errors.NoSuchRevision, e:
 778 |  782 |             # XXX: It would be nice if we could give a more precise error here.
 779 |  783 |             problems.append("missing chk node(s) for id_to_entry maps")
 780 |  784 |         chk_diff = chk_map.iter_interesting_nodes(

 783 |  787 |         try:
 784 |  788 |             for interesting_rec, interesting_map in chk_diff:
 785 |  789 |                 pass
 786 |      |         except errors.NoSuchRevision as e:
     |  790 |         except errors.NoSuchRevision, e:
 787 |  791 |             problems.append(
 788 |  792 |                 "missing chk node(s) for parent_id_basename_to_file_id maps")
 789 |  793 |         present_text_keys = no_fallback_texts_index.get_parent_map(text_keys)

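The two except clauses in this hunk differ only in spelling: `except errors.NoSuchRevision, e:` is Python 2-only syntax, while `except errors.NoSuchRevision as e:` works on Python 2.6+ and Python 3. A trivial standalone example of the portable form:

    try:
        {}['missing']
    except KeyError as e:   # `except KeyError, e:` would be a SyntaxError on Python 3
        print('caught %r' % (e,))
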
 797 |  801 | class CHKInventoryRepository(PackRepository):
 798 |  802 |     """subclass of PackRepository that uses CHK based inventories."""
 799 |  803 | 
 800 |      |     def __init__(self, _format, a_controldir, control_files, _commit_builder_class,
     |  804 |     def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
 801 |  805 |         _serializer):
 802 |  806 |         """Overridden to change pack collection class."""
 803 |      |         super(CHKInventoryRepository, self).__init__(_format, a_controldir,
     |  807 |         super(CHKInventoryRepository, self).__init__(_format, a_bzrdir,
 804 |  808 |             control_files, _commit_builder_class, _serializer)
 805 |  809 |         index_transport = self._transport.clone('indices')
 806 |  810 |         self._pack_collection = GCRepositoryPackCollection(self,

 897 |  901 |                                  ' no new_path %r' % (file_id,))
 898 |  902 |             if new_path == '':
 899 |  903 |                 new_inv.root_id = file_id
 900 |      |                 parent_id_basename_key = StaticTuple(b'', b'').intern()
     |  904 |                 parent_id_basename_key = StaticTuple('', '').intern()
 901 |  905 |             else:
 902 |  906 |                 utf8_entry_name = entry.name.encode('utf-8')
 903 |  907 |                 parent_id_basename_key = StaticTuple(entry.parent_id,

1016 | 1020 |         rich_root = self.supports_rich_root()
1017 | 1021 |         bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
1018 | 1022 |         file_id_revisions = {}
1019 |      |         with ui.ui_factory.nested_progress_bar() as pb:
     | 1023 |         pb = ui.ui_factory.nested_progress_bar()
     | 1024 |         try:
1020 | 1025 |             revision_keys = [(r,) for r in revision_ids]
1021 | 1026 |             parent_keys = self._find_parent_keys_of_revisions(revision_keys)
1022 | 1027 |             # TODO: instead of using _find_present_inventory_keys, change the

1025 | 1030 |             #       inventories, not missing inventories for revision_ids
1026 | 1031 |             present_parent_inv_keys = self._find_present_inventory_keys(
1027 | 1032 |                                         parent_keys)
1028 |      |             present_parent_inv_ids = {k[-1] for k in present_parent_inv_keys}
     | 1033 |             present_parent_inv_ids = set(
     | 1034 |                 [k[-1] for k in present_parent_inv_keys])
1029 | 1035 |             inventories_to_read = set(revision_ids)
1030 | 1036 |             inventories_to_read.update(present_parent_inv_ids)
1031 | 1037 |             root_key_info = _build_interesting_key_sets(

1047 | 1053 |                     try:
1048 | 1054 |                         file_id_revisions[file_id].add(revision_id)
1049 | 1055 |                     except KeyError:
1050 |      |                         file_id_revisions[file_id] = {revision_id}
     | 1056 |                         file_id_revisions[file_id] = set([revision_id])
     | 1057 |         finally:
     | 1058 |             pb.finished()
1051 | 1059 |         return file_id_revisions
1052 | 1060 | 
1053 | 1061 |     def find_text_key_references(self):

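This hunk also modernizes set construction: set([...]) wrapping a list comprehension becomes a set comprehension, and set([revision_id]) becomes the literal {revision_id}. Both spellings build identical sets:

    present_parent_inv_keys = [('rev-1',), ('rev-2',)]   # example data only
    assert ({k[-1] for k in present_parent_inv_keys}
            == set([k[-1] for k in present_parent_inv_keys]))
    assert {'rev-1'} == set(['rev-1'])
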
1065 | 1073 |         revision_keys = self.revisions.keys()
1066 | 1074 |         result = {}
1067 | 1075 |         rich_roots = self.supports_rich_root()
1068 |      |         with ui.ui_factory.nested_progress_bar() as pb:
     | 1076 |         pb = ui.ui_factory.nested_progress_bar()
     | 1077 |         try:
1069 | 1078 |             all_revs = self.all_revision_ids()
1070 | 1079 |             total = len(all_revs)
1071 | 1080 |             for pos, inv in enumerate(self.iter_inventories(all_revs)):

1078 | 1087 |                     if entry.revision == inv.revision_id:
1079 | 1088 |                         result[key] = True
1080 | 1089 |             return result
     | 1090 |         finally:
     | 1091 |             pb.finished()
1081 | 1092 | 
     | 1093 |     @needs_write_lock
1082 | 1094 |     def reconcile_canonicalize_chks(self):
1083 | 1095 |         """Reconcile this repository to make sure all CHKs are in canonical
1084 | 1096 |         form.
1085 | 1097 |         """
1086 |      |         from breezy.reconcile import PackReconciler
1087 |      |         with self.lock_write():
1088 |      |             reconciler = PackReconciler(self, thorough=True, canonicalize_chks=True)
1089 |      |             reconciler.reconcile()
1090 |      |             return reconciler
     | 1098 |         from bzrlib.reconcile import PackReconciler
     | 1099 |         reconciler = PackReconciler(self, thorough=True, canonicalize_chks=True)
     | 1100 |         reconciler.reconcile()
     | 1101 |         return reconciler
1091 | 1102 | 
1092 | 1103 |     def _reconcile_pack(self, collection, packs, extension, revs, pb):
1093 | 1104 |         packer = GCCHKReconcilePacker(collection, packs, extension)

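In reconcile_canonicalize_chks one side relies on the @needs_write_lock decorator (imported from bzrlib.decorators in the import hunk above), the other takes the lock explicitly with `with self.lock_write():`. A generic sketch of the decorator-based idiom, assuming lock_write()/unlock() methods on the object; this is an illustration, not bzrlib's real decorator:

    from functools import wraps

    def needs_write_lock(unbound):
        """Run the decorated method with the object's write lock held."""
        @wraps(unbound)
        def decorated(self, *args, **kwargs):
            self.lock_write()
            try:
                return unbound(self, *args, **kwargs)
            finally:
                self.unlock()
        return decorated

The explicit form in the hunk additionally assumes lock_write() returns a context manager that unlocks on exit, which keeps the lock scope visible at the call site.
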
1120 | 1131 |             raise AssertionError()
1121 | 1132 |         vf = self.revisions
1122 | 1133 |         if revisions_iterator is None:
1123 |      |             revisions_iterator = self.iter_revisions(self.all_revision_ids())
     | 1134 |             revisions_iterator = self._iter_revisions(None)
1124 | 1135 |         for revid, revision in revisions_iterator:
1125 | 1136 |             if revision is None:
1126 | 1137 |                 pass

1346 | 1357 |     """
1347 | 1358 |     text_keys_update = text_keys.update
1348 | 1359 |     for record, items in interesting_nodes_iterable:
1349 |      |         text_keys_update([bytes_to_text_key(b) for n, b in items])
     | 1360 |         text_keys_update([bytes_to_text_key(b) for n,b in items])
1350 | 1361 |         yield record
1351 | 1362 | 
1352 | 1363 | 

1375 | 1386 |     pack_compresses = True
1376 | 1387 | 
1377 | 1388 |     def _get_matching_bzrdir(self):
1378 |      |         return controldir.format_registry.make_controldir('2a')
     | 1389 |         return controldir.format_registry.make_bzrdir('2a')
1379 | 1390 | 
1380 | 1391 |     def _ignore_setting_bzrdir(self, format):
1381 | 1392 |         pass
1382 | 1393 | 
1383 |      |     _matchingcontroldir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
     | 1394 |     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
1384 | 1395 | 
1385 | 1396 |     @classmethod
1386 | 1397 |     def get_format_string(cls):
1387 |      |         return b'Bazaar repository format 2a (needs bzr 1.16 or later)\n'
     | 1398 |         return ('Bazaar repository format 2a (needs bzr 1.16 or later)\n')
1388 | 1399 | 
1389 | 1400 |     def get_format_description(self):
1390 | 1401 |         """See RepositoryFormat.get_format_description()."""

1398 | 1409 |     """
1399 | 1410 | 
1400 | 1411 |     def _get_matching_bzrdir(self):
1401 |      |         return controldir.format_registry.make_controldir('development-subtree')
     | 1412 |         return controldir.format_registry.make_bzrdir('development-subtree')
1402 | 1413 | 
1403 | 1414 |     def _ignore_setting_bzrdir(self, format):
1404 | 1415 |         pass
1405 | 1416 | 
1406 |      |     _matchingcontroldir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
     | 1417 |     _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
1407 | 1418 | 
1408 | 1419 |     @classmethod
1409 | 1420 |     def get_format_string(cls):
1410 |      |         return b'Bazaar development format 8\n'
     | 1421 |         return ('Bazaar development format 8\n')
1411 | 1422 | 
1412 | 1423 |     def get_format_description(self):
1413 | 1424 |         """See RepositoryFormat.get_format_description()."""