/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to bzrlib/repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-12-10 17:16:19 UTC
  • mfrom: (4884 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4889.
  • Revision ID: john@arbash-meinel.com-20091210171619-ehdcxjbl8afhq9g1
  • Commit message: Bring in bzr.dev 4884

@@ -26,6 +26,7 @@
     chk_map,
     debug,
     errors,
+    fetch as _mod_fetch,
     fifo_cache,
     generate_ids,
     gpg,
@@ -49,7 +50,7 @@
 from bzrlib.testament import Testament
 """)
 
-from bzrlib.decorators import needs_read_lock, needs_write_lock
+from bzrlib.decorators import needs_read_lock, needs_write_lock, only_raises
 from bzrlib.inter import InterObject
 from bzrlib.inventory import (
     Inventory,
@@ -57,6 +58,7 @@
     ROOT_ID,
     entry_factory,
     )
+from bzrlib.lock import _RelockDebugMixin
 from bzrlib import registry
 from bzrlib.trace import (
     log_exception_quietly, note, mutter, mutter_callsite, warning)
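
The `fetch as _mod_fetch` entry above is added inside the module's lazy_import block (the block closed by the `""")` line). A minimal sketch of that idiom, with an abbreviated module list that is not the file's actual header:

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    fetch as _mod_fetch,
    gpg,
    )
""")

# _mod_fetch is only a placeholder at import time; bzrlib.fetch is loaded
# the first time an attribute such as _mod_fetch.RepoFetcher is accessed.
# Later hunks in this diff (RepoFetcher, _new_root_data_stream,
# Inter1and2Helper) switch eager 'from bzrlib.fetch import ...' statements
# over to this lazily bound name.
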
@@ -205,7 +207,10 @@
             # an inventory delta was accumulated without creating a new
             # inventory.
             basis_id = self.basis_delta_revision
-            self.inv_sha1 = self.repository.add_inventory_by_delta(
+            # We ignore the 'inventory' returned by add_inventory_by_delta
+            # because self.new_inventory is used to hint to the rest of the
+            # system what code path was taken
+            self.inv_sha1, _ = self.repository.add_inventory_by_delta(
                 basis_id, self._basis_delta, self._new_revision_id,
                 self.parents)
         else:
@@ -856,7 +861,7 @@
 # Repositories
 
 
-class Repository(object):
+class Repository(_RelockDebugMixin):
     """Repository holding history for one or more branches.
 
     The repository holds and retrieves historical information including
@@ -1381,6 +1386,7 @@
         locked = self.is_locked()
         result = self.control_files.lock_write(token=token)
         if not locked:
+            self._note_lock('w')
             for repo in self._fallback_repositories:
                 # Writes don't affect fallback repos
                 repo.lock_read()
@@ -1391,6 +1397,7 @@
         locked = self.is_locked()
         self.control_files.lock_read()
         if not locked:
+            self._note_lock('r')
             for repo in self._fallback_repositories:
                 repo.lock_read()
             self._refresh_data()
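
Repository now mixes in _RelockDebugMixin (see the class statement hunk above), and the lock methods call self._note_lock(...) when the lock is first taken. A rough sketch of what such a mixin can look like, assuming it just tracks the previous lock type and complains when the 'relock' debug flag is active; the real implementation in bzrlib.lock may differ in detail:

from bzrlib import debug, trace

class _RelockDebugMixinSketch(object):
    """Hedged sketch of a relock-debugging helper, not the real mixin."""

    _prev_lock = None

    def _note_lock(self, lock_type):
        # lock_type is 'r' or 'w', matching the calls in lock_read/lock_write.
        if 'relock' in debug.debug_flags and self._prev_lock == lock_type:
            if lock_type == 'r':
                type_name = 'read'
            else:
                type_name = 'write'
            trace.note('%r was %s locked again', self, type_name)
        self._prev_lock = lock_type
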
@@ -1720,6 +1727,7 @@
         self.start_write_group()
         return result
 
+    @only_raises(errors.LockNotHeld, errors.LockBroken)
     def unlock(self):
         if (self.control_files._lock_count == 1 and
             self.control_files._lock_mode == 'w'):
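
The new @only_raises(errors.LockNotHeld, errors.LockBroken) decorator on unlock() limits which exceptions may escape. A hedged sketch of the idea, assuming the decorator lets the listed lock errors propagate and quietly logs anything else (unlock is routinely called from finally blocks, where a secondary error would mask the original one); the real bzrlib.decorators.only_raises may differ:

from bzrlib import trace

def only_raises(*errors):
    """Sketch: re-raise the listed exception types, log everything else."""
    def decorator(unbound):
        def wrapped(*args, **kwargs):
            try:
                return unbound(*args, **kwargs)
            except errors:
                raise
            except Exception:
                trace.mutter('error suppressed by only_raises:')
                trace.log_exception_quietly()
        wrapped.__name__ = unbound.__name__
        wrapped.__doc__ = unbound.__doc__
        return wrapped
    return decorator
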
@@ -2329,7 +2337,7 @@
         num_file_ids = len(file_ids)
         for file_id, altered_versions in file_ids.iteritems():
             if pb is not None:
-                pb.update("fetch texts", count, num_file_ids)
+                pb.update("Fetch texts", count, num_file_ids)
             count += 1
             yield ("file", file_id, altered_versions)
 
@@ -2661,8 +2669,8 @@
         for ((revision_id,), parent_keys) in \
                 self.revisions.get_parent_map(query_keys).iteritems():
             if parent_keys:
-                result[revision_id] = tuple(parent_revid
-                    for (parent_revid,) in parent_keys)
+                result[revision_id] = tuple([parent_revid
+                    for (parent_revid,) in parent_keys])
             else:
                 result[revision_id] = (_mod_revision.NULL_REVISION,)
         return result
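
The switch from a generator expression to a list comprehension inside tuple() is a micro-optimisation: on CPython 2, building the small list first and handing it to tuple() tends to avoid per-item generator overhead. A quick, hedged way to check it yourself (results vary by interpreter and data size):

import timeit

setup = "parent_keys = [('rev-1',), ('rev-2',)]"
gen = timeit.timeit("tuple(p for (p,) in parent_keys)",
                    setup=setup, number=1000000)
lst = timeit.timeit("tuple([p for (p,) in parent_keys])",
                    setup=setup, number=1000000)
print("genexpr:  %.3fs" % gen)
print("listcomp: %.3fs" % lst)
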
@@ -3086,7 +3094,7 @@
         """
         try:
             transport = a_bzrdir.get_repository_transport(None)
-            format_string = transport.get("format").read()
+            format_string = transport.get_bytes("format")
             return format_registry.get(format_string)
         except errors.NoSuchFile:
             raise errors.NoRepositoryPresent(a_bzrdir)
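
transport.get() returns a file-like object, whereas transport.get_bytes() returns the content directly, so the new form avoids the intermediate file object. A small sketch, assuming t is a bzrlib Transport instance:

# Old style: a file-like object that has to be read (and ideally closed).
f = t.get("format")
try:
    format_string = f.read()
finally:
    f.close()

# New style: one call returning the raw bytes.
format_string = t.get_bytes("format")
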
@@ -3405,8 +3413,7 @@
                    provided a default one will be created.
         :return: None.
         """
-        from bzrlib.fetch import RepoFetcher
-        f = RepoFetcher(to_repository=self.target,
+        f = _mod_fetch.RepoFetcher(to_repository=self.target,
                                from_repository=self.source,
                                last_revision=revision_id,
                                fetch_spec=fetch_spec,
@@ -3585,7 +3592,7 @@
                 self.target.texts.insert_record_stream(
                     self.source.texts.get_record_stream(
                         self.source.texts.keys(), 'topological', False))
-                pb.update('copying inventory', 0, 1)
+                pb.update('Copying inventory', 0, 1)
                 self.target.inventories.insert_record_stream(
                     self.source.inventories.get_record_stream(
                         self.source.inventories.keys(), 'topological', False))
@@ -3812,13 +3819,15 @@
                 basis_id, delta, current_revision_id, parents_parents)
             cache[current_revision_id] = parent_tree
 
-    def _fetch_batch(self, revision_ids, basis_id, cache):
+    def _fetch_batch(self, revision_ids, basis_id, cache, a_graph=None):
         """Fetch across a few revisions.
 
         :param revision_ids: The revisions to copy
         :param basis_id: The revision_id of a tree that must be in cache, used
             as a basis for delta when no other base is available
         :param cache: A cache of RevisionTrees that we can use.
+        :param a_graph: A Graph object to determine the heads() of the
+            rich-root data stream.
         :return: The revision_id of the last converted tree. The RevisionTree
             for it will be in cache
         """
@@ -3888,10 +3897,9 @@
         from_texts = self.source.texts
         to_texts = self.target.texts
         if root_keys_to_create:
-            from bzrlib.fetch import _new_root_data_stream
-            root_stream = _new_root_data_stream(
+            root_stream = _mod_fetch._new_root_data_stream(
                 root_keys_to_create, self._revision_id_to_root_id, parent_map,
-                self.source)
+                self.source, graph=a_graph)
             to_texts.insert_record_stream(root_stream)
         to_texts.insert_record_stream(from_texts.get_record_stream(
             text_keys, self.target._format._fetch_order,
@@ -3954,13 +3962,20 @@
         cache[basis_id] = basis_tree
         del basis_tree # We don't want to hang on to it here
         hints = []
+        if self._converting_to_rich_root and len(revision_ids) > 100:
+            a_graph = _mod_fetch._get_rich_root_heads_graph(self.source,
+                                                            revision_ids)
+        else:
+            a_graph = None
+
         for offset in range(0, len(revision_ids), batch_size):
             self.target.start_write_group()
             try:
                 pb.update('Transferring revisions', offset,
                           len(revision_ids))
                 batch = revision_ids[offset:offset+batch_size]
-                basis_id = self._fetch_batch(batch, basis_id, cache)
+                basis_id = self._fetch_batch(batch, basis_id, cache,
+                                             a_graph=a_graph)
             except:
                 self.target.abort_write_group()
                 raise
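
The last few hunks thread an optional precomputed heads graph through the batched fetch: it is built once, only when converting to rich roots on a large enough set of revisions, and then reused by every _fetch_batch call instead of being rebuilt per batch. A simplified sketch of the pattern (the helper names and callables here are illustrative, not bzrlib API):

BATCH_SIZE = 100

def fetch_in_batches(revision_ids, basis_id, cache, fetch_batch,
                     build_graph, converting_to_rich_root):
    # Build the expensive revision-wide structure once, and only when the
    # workload is large enough for the cost to pay off.
    if converting_to_rich_root and len(revision_ids) > 100:
        a_graph = build_graph(revision_ids)
    else:
        a_graph = None
    for offset in range(0, len(revision_ids), BATCH_SIZE):
        batch = revision_ids[offset:offset + BATCH_SIZE]
        # Every batch reuses the same graph instead of recomputing it.
        basis_id = fetch_batch(batch, basis_id, cache, a_graph=a_graph)
    return basis_id
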
@@ -4078,13 +4093,13 @@
                                                   self.source_repo.is_shared())
         converted.lock_write()
         try:
-            self.step('Copying content into repository.')
+            self.step('Copying content')
             self.source_repo.copy_content_into(converted)
         finally:
             converted.unlock()
-        self.step('Deleting old repository content.')
+        self.step('Deleting old repository content')
         self.repo_dir.transport.delete_tree('repository.backup')
-        self.pb.note('repository converted')
+        ui.ui_factory.note('repository converted')
 
     def step(self, message):
         """Update the pb by a step."""
@@ -4315,6 +4330,13 @@
                 ):
                 if versioned_file is None:
                     continue
+                # TODO: key is often going to be a StaticTuple object
+                #       I don't believe we can define a method by which
+                #       (prefix,) + StaticTuple will work, though we could
+                #       define a StaticTuple.sq_concat that would allow you to
+                #       pass in either a tuple or a StaticTuple as the second
+                #       object, so instead we could have:
+                #       StaticTuple(prefix) + key here...
                 missing_keys.update((prefix,) + key for key in
                     versioned_file.get_missing_compression_parent_keys())
         except NotImplementedError:
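
The TODO added above concerns concatenating a plain tuple prefix with a StaticTuple key. A hedged illustration of the issue (the bzrlib.static_tuple import path is assumed, and the exact behaviour depends on which StaticTuple implementation is compiled in):

from bzrlib.static_tuple import StaticTuple

prefix = ('file-id',)
key = StaticTuple('rev-id')

try:
    # What the code above would like to write directly.
    combined = prefix + key
except TypeError:
    # tuple.__add__ only accepts real tuples, so fall back to copying the
    # StaticTuple into a plain tuple before concatenating.
    combined = prefix + tuple(key)
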
@@ -4432,8 +4454,7 @@
         fetching the inventory weave.
         """
         if self._rich_root_upgrade():
-            import bzrlib.fetch
-            return bzrlib.fetch.Inter1and2Helper(
+            return _mod_fetch.Inter1and2Helper(
                 self.from_repository).generate_root_texts(revs)
         else:
             return []