/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/git/interrepo.py

  • Committer: Gustav Hartvigsson
  • Date: 2021-01-09 21:36:27 UTC
  • Revision ID: gustav.hartvigsson@gmail.com-20210109213627-h1xwcutzy9m7a99b
Added 'Case Preserving Working Tree Use Cases' from Canonical Wiki

* Added a page from the Canonical Bazaar wiki
  with information on the semantics of how case-preserving
  filesystems and case-insensitive filesystems work.

  * Needs re-work, but this will do, as it is the
    same information as what was on the linked
    page in the current documentation.
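
The behaviour the added page documents is that of filesystems which ignore filename case on lookup but keep the spelling used at creation time (the defaults on NTFS and APFS/HFS+). A minimal probe of that property, offered purely as a hypothetical illustration and not taken from the wiki page:

    import os
    import tempfile

    # Hypothetical probe: on a case-insensitive but case-preserving filesystem,
    # 'CamelCase.txt' and 'camelcase.txt' resolve to the same file, while the
    # directory listing keeps the spelling used when the file was created.
    # On a fully case-sensitive filesystem the folded lookup fails instead.
    with tempfile.TemporaryDirectory() as tmp:
        with open(os.path.join(tmp, 'CamelCase.txt'), 'w') as f:
            f.write('hello\n')
        print('case-folded lookup works:',
              os.path.exists(os.path.join(tmp, 'camelcase.txt')))
        print('spelling preserved on disk:', os.listdir(tmp))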

@@ -16,8 +16,6 @@
 
 """InterRepository operations."""
 
-from __future__ import absolute_import
-
 from io import BytesIO
 import itertools
 
@@ -46,15 +44,12 @@
     NoSuchRevision,
     )
 from ..repository import (
+    FetchResult,
     InterRepository,
     )
 from ..revision import (
     NULL_REVISION,
     )
-from ..sixish import (
-    viewitems,
-    viewvalues,
-    )
 from .. import (
     config,
     trace,
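
The dropped ..sixish import provided viewitems() and viewvalues(), Python 2/3 compatibility wrappers around the dict view methods; the hunks below replace each call site with plain .items() / .values(). A rough sketch of the equivalence on Python 3 (the wrapper bodies here are assumed from their use in this diff, not copied from sixish):

    # Assumed shape of the removed compatibility helpers on Python 3:
    def viewitems(d):
        return d.items()

    def viewvalues(d):
        return d.values()

    refs = {'refs/heads/master': b'abc', 'refs/tags/v1.0': b'def'}
    # ...so a call site such as `for k, v in viewitems(refs)` rewrites
    # mechanically to `for k, v in refs.items()`.
    assert list(viewitems(refs)) == list(refs.items())
    assert list(viewvalues(refs)) == list(refs.values())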
@@ -80,6 +75,7 @@
     )
 from .refs import (
     is_tag,
+    ref_to_tag_name,
     )
 from .repository import (
     GitRepository,
@@ -88,6 +84,7 @@
     )
 from .remote import (
     RemoteGitRepository,
+    RemoteGitError,
     )
 from .unpeel_map import (
     UnpeelMap,
@@ -216,8 +213,7 @@
                 stop_revids.append(revid)
         missing = set()
         graph = self.source.get_graph()
-        pb = ui.ui_factory.nested_progress_bar()
-        try:
+        with ui.ui_factory.nested_progress_bar() as pb:
             while stop_revids:
                 new_stop_revids = []
                 for revid in stop_revids:
@@ -228,11 +224,9 @@
                         new_stop_revids.append(revid)
                 stop_revids = set()
                 parent_map = graph.get_parent_map(new_stop_revids)
-                for parent_revids in viewvalues(parent_map):
+                for parent_revids in parent_map.values():
                     stop_revids.update(parent_revids)
                 pb.update("determining revisions to fetch", len(missing))
-        finally:
-            pb.finished()
         return graph.iter_topo_order(missing)
 
     def _get_target_bzr_refs(self):
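
The same try/finally-to-with rewrite around ui.ui_factory.nested_progress_bar() recurs in several hunks below: the returned progress bar is used as a context manager, so finished() no longer has to be called by hand. A minimal sketch of the before/after shape, assuming only the update()/finished() calls visible in this diff:

    from breezy import ui

    revids = [b'rev-1', b'rev-2', b'rev-3']

    # Old shape (removed in this diff): explicit cleanup in a finally block.
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for i, revid in enumerate(revids):
            pb.update("determining revisions to fetch", i)
    finally:
        pb.finished()

    # New shape: the progress bar cleans itself up when the block exits,
    # even if an exception is raised inside it.
    with ui.ui_factory.nested_progress_bar() as pb:
        for i, revid in enumerate(revids):
            pb.update("determining revisions to fetch", i)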
@@ -249,7 +243,7 @@
                 # broken symref?
                 continue
             revid = None
-            if not v.startswith(SYMREF):
+            if v and not v.startswith(SYMREF):
                 try:
                     for (kind, type_data) in self.source_store.lookup_git_sha(
                             v):
@@ -273,7 +267,7 @@
                  for (git_sha, bzr_revid) in new_refs.values()
                  if git_sha is None or not git_sha.startswith(SYMREF)],
                 lossy=lossy)
-            for name, (gitid, revid) in viewitems(new_refs):
+            for name, (gitid, revid) in new_refs.items():
                 if gitid is None:
                     try:
                         gitid = revidmap[revid][0]
@@ -302,8 +296,7 @@
         with self.source_store.lock_read():
             todo = list(self.missing_revisions(revs))[:limit]
             revidmap = {}
-            pb = ui.ui_factory.nested_progress_bar()
-            try:
+            with ui.ui_factory.nested_progress_bar() as pb:
                 object_generator = MissingObjectsIterator(
                     self.source_store, self.source, pb)
                 for (old_revid, git_sha) in object_generator.import_revisions(
@@ -316,16 +309,13 @@
                         try:
                             self.mapping.revision_id_bzr_to_foreign(old_revid)
                         except InvalidRevisionId:
-                            refname = self.mapping.revid_as_refname(old_revid)
-                            self.target_refs[refname] = git_sha
+                            pass
                     revidmap[old_revid] = (git_sha, new_revid)
                 self.target_store.add_objects(object_generator)
                 return revidmap
-            finally:
-                pb.finished()
 
     def fetch(self, revision_id=None, pb=None, find_ghosts=False,
-              fetch_spec=None, mapped_refs=None):
+              fetch_spec=None, mapped_refs=None, lossy=False):
         if mapped_refs is not None:
             stop_revisions = mapped_refs
         elif revision_id is not None:
@@ -342,9 +332,10 @@
                               for revid in self.source.all_revision_ids()]
         self._warn_slow()
         try:
-            self.fetch_objects(stop_revisions, lossy=False)
+            revidmap = self.fetch_objects(stop_revisions, lossy=lossy)
         except NoPushSupport:
             raise NoRoundtrippingSupport(self.source, self.target)
+        return FetchResult(revidmap)
 
     @staticmethod
     def is_compatible(source, target):
@@ -365,9 +356,9 @@
         def git_update_refs(old_refs):
             ret = {}
             self.old_refs = {
-                k: (v, None) for (k, v) in viewitems(old_refs)}
+                k: (v, None) for (k, v) in old_refs.items()}
             new_refs = update_refs(self.old_refs)
-            for name, (gitid, revid) in viewitems(new_refs):
+            for name, (gitid, revid) in new_refs.items():
                 if gitid is None:
                     git_sha = self.source_store._lookup_revision_sha1(revid)
                     gitid = unpeel_map.re_unpeel_tag(
@@ -380,8 +371,16 @@
             return ret
         self._warn_slow()
         with self.source_store.lock_read():
-            new_refs = self.target.send_pack(
+            result = self.target.send_pack(
                 git_update_refs, self.source_store.generate_lossy_pack_data)
+            if result is not None and not isinstance(result, dict):
+                for ref, error in result.ref_status.items():
+                    if error:
+                        raise RemoteGitError(
+                            'unable to update ref %r: %s' % (ref, error))
+                new_refs = result.refs
+            else:  # dulwich < 0.20.3
+                new_refs = result
         # FIXME: revidmap?
         return revidmap, self.old_refs, new_refs
 
@@ -413,20 +412,24 @@
     def _target_has_shas(self, shas):
         raise NotImplementedError(self._target_has_shas)
 
-    def get_determine_wants_heads(self, wants, include_tags=False):
+    def get_determine_wants_heads(self, wants, include_tags=False, tag_selector=None):
         wants = set(wants)
 
         def determine_wants(refs):
             unpeel_lookup = {}
-            for k, v in viewitems(refs):
+            for k, v in refs.items():
                 if k.endswith(ANNOTATED_TAG_SUFFIX):
                     unpeel_lookup[v] = refs[k[:-len(ANNOTATED_TAG_SUFFIX)]]
             potential = set([unpeel_lookup.get(w, w) for w in wants])
             if include_tags:
-                for k, sha in viewitems(refs):
+                for k, sha in refs.items():
                     if k.endswith(ANNOTATED_TAG_SUFFIX):
                         continue
-                    if not is_tag(k):
+                    try:
+                        tag_name = ref_to_tag_name(k)
+                    except ValueError:
+                        continue
+                    if tag_selector and not tag_selector(tag_name):
                         continue
                     if sha == ZERO_SHA:
                         continue
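
The tag_selector argument threaded through get_determine_wants_heads() above is, judging by the tag_selector(tag_name) call, a callable that takes a tag name and says whether that tag should be fetched; when it returns false the tag's sha is skipped. A hypothetical selector, purely as an illustration of the shape the hunk expects:

    import fnmatch

    # Hypothetical selector: only keep tags that look like release tags.
    def release_tags_only(tag_name):
        return fnmatch.fnmatch(tag_name, 'v[0-9]*')

    # Mirrors the check added above: a falsy selector keeps every tag,
    # otherwise tags the selector rejects are skipped.
    tag_selector = release_tags_only
    for tag_name in ['v1.0', 'nightly-2021-01-09', 'v2.3.1']:
        if tag_selector and not tag_selector(tag_name):
            continue
        print('would fetch tag', tag_name)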
@@ -462,8 +465,8 @@
             if if_present_ids is not None:
                 todo.update(if_present_ids)
         result_set = todo.difference(self.target.all_revision_ids())
-        result_parents = set(itertools.chain.from_iterable(viewvalues(
-            self.source.get_graph().get_parent_map(result_set))))
+        result_parents = set(itertools.chain.from_iterable(
+            self.source.get_graph().get_parent_map(result_set).values()))
         included_keys = result_set.intersection(result_parents)
         start_keys = result_set.difference(included_keys)
         exclude_keys = result_parents.difference(result_set)
@@ -488,7 +491,7 @@
 
     def determine_wants_all(self, refs):
         potential = set()
-        for k, v in viewitems(refs):
+        for k, v in refs.items():
             # For non-git target repositories, only worry about peeled
             if v == ZERO_SHA:
                 continue
@@ -512,17 +515,18 @@
         """
         raise NotImplementedError(self.fetch_objects)
 
-    def get_determine_wants_revids(self, revids, include_tags=False):
+    def get_determine_wants_revids(self, revids, include_tags=False, tag_selector=None):
         wants = set()
         for revid in set(revids):
             if self.target.has_revision(revid):
                 continue
             git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
             wants.add(git_sha)
-        return self.get_determine_wants_heads(wants, include_tags=include_tags)
+        return self.get_determine_wants_heads(
+            wants, include_tags=include_tags, tag_selector=tag_selector)
 
     def fetch(self, revision_id=None, find_ghosts=False,
-              mapping=None, fetch_spec=None, include_tags=False):
+              mapping=None, fetch_spec=None, include_tags=False, lossy=False):
         if mapping is None:
             mapping = self.source.get_mapping()
         if revision_id is not None:
@@ -543,11 +547,13 @@
         else:
             determine_wants = self.determine_wants_all
 
-        (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants,
-                                                         mapping)
+        (pack_hint, _, remote_refs) = self.fetch_objects(
+            determine_wants, mapping, lossy=lossy)
         if pack_hint is not None and self.target._format.pack_compresses:
             self.target.pack(hint=pack_hint)
-        return remote_refs
+        result = FetchResult()
+        result.refs = remote_refs
+        return result
 
 
 class InterRemoteGitNonGitRepository(InterGitNonGitRepository):
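
With this change the fetch() implementations above return a FetchResult (newly imported from ..repository) instead of a bare refs dict or revid map. Going only by the usage visible in this diff, FetchResult(revidmap) and result.refs, a caller might now read the result roughly like this (the InterRepository.get() lookup is the usual breezy pattern, assumed here rather than shown in any hunk):

    from breezy.repository import InterRepository

    def fetch_between(source_repo, target_repo):
        # Hypothetical caller: pick the right Inter* implementation and fetch.
        inter = InterRepository.get(source_repo, target_repo)
        result = inter.fetch()
        # The hunks above attach the remote refs to the result object.
        return getattr(result, 'refs', None)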
@@ -559,7 +565,7 @@
         all_revs = self.target.all_revision_ids()
         parent_map = self.target.get_parent_map(all_revs)
         all_parents = set()
-        for values in viewvalues(parent_map):
+        for values in parent_map.values():
             all_parents.update(values)
         return set(all_revs) - all_parents
 
@@ -574,8 +580,7 @@
                 lambda sha: store[sha].parents)
             wants_recorder = DetermineWantsRecorder(determine_wants)
 
-            pb = ui.ui_factory.nested_progress_bar()
-            try:
+            with ui.ui_factory.nested_progress_bar() as pb:
                 objects_iter = self.source.fetch_objects(
                     wants_recorder, graph_walker, store.get_raw)
                 trace.mutter("Importing %d new revisions",
@@ -584,8 +589,6 @@
                     self.target, mapping, objects_iter, store,
                     wants_recorder.wants, pb, limit)
                 return (pack_hint, last_rev, wants_recorder.remote_refs)
-            finally:
-                pb.finished()
 
     @staticmethod
     def is_compatible(source, target):
@@ -610,9 +613,8 @@
         self._warn_slow()
         remote_refs = self.source.controldir.get_refs_container().as_dict()
         wants = determine_wants(remote_refs)
-        pb = ui.ui_factory.nested_progress_bar()
         target_git_object_retriever = get_object_store(self.target, mapping)
-        try:
+        with ui.ui_factory.nested_progress_bar() as pb:
             target_git_object_retriever.lock_write()
             try:
                 (pack_hint, last_rev) = import_git_objects(
@@ -621,8 +623,6 @@
                 return (pack_hint, last_rev, remote_refs)
             finally:
                 target_git_object_retriever.unlock()
-        finally:
-            pb.finished()
 
     @staticmethod
     def is_compatible(source, target):
@@ -649,12 +649,12 @@
 
         def determine_wants(heads):
             old_refs = dict([(k, (v, None))
-                             for (k, v) in viewitems(heads.as_dict())])
+                             for (k, v) in heads.as_dict().items()])
             new_refs = update_refs(old_refs)
             ref_changes.update(new_refs)
-            return [sha1 for (sha1, bzr_revid) in viewvalues(new_refs)]
+            return [sha1 for (sha1, bzr_revid) in new_refs.values()]
         self.fetch_objects(determine_wants, lossy=lossy)
-        for k, (git_sha, bzr_revid) in viewitems(ref_changes):
+        for k, (git_sha, bzr_revid) in ref_changes.items():
             self.target._git.refs[k] = git_sha
         new_refs = self.target.controldir.get_refs_container()
         return None, old_refs, new_refs
@@ -669,7 +669,7 @@
 
     def fetch(self, revision_id=None, find_ghosts=False,
               mapping=None, fetch_spec=None, branches=None, limit=None,
-              include_tags=False):
+              include_tags=False, lossy=False):
         if mapping is None:
             mapping = self.source.get_mapping()
         if revision_id is not None:
@@ -683,32 +683,42 @@
                     "Unsupported search result type %s" % recipe[0])
             args = heads
         if branches is not None:
-            def determine_wants(refs):
-                ret = []
-                for name, value in viewitems(refs):
-                    if value == ZERO_SHA:
-                        continue
-
-                    if name in branches or (include_tags and is_tag(name)):
-                        ret.append(value)
-                return ret
+            determine_wants = self.get_determine_wants_branches(
+                branches, include_tags=include_tags)
         elif fetch_spec is None and revision_id is None:
             determine_wants = self.determine_wants_all
         else:
             determine_wants = self.get_determine_wants_revids(
                 args, include_tags=include_tags)
         wants_recorder = DetermineWantsRecorder(determine_wants)
-        self.fetch_objects(wants_recorder, mapping, limit=limit)
-        return wants_recorder.remote_refs
+        self.fetch_objects(wants_recorder, mapping, limit=limit, lossy=lossy)
+        result = FetchResult()
+        result.refs = wants_recorder.remote_refs
+        return result
 
-    def get_determine_wants_revids(self, revids, include_tags=False):
+    def get_determine_wants_revids(self, revids, include_tags=False, tag_selector=None):
         wants = set()
         for revid in set(revids):
             if revid == NULL_REVISION:
                 continue
             git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
             wants.add(git_sha)
-        return self.get_determine_wants_heads(wants, include_tags=include_tags)
+        return self.get_determine_wants_heads(wants, include_tags=include_tags, tag_selector=tag_selector)
+
+    def get_determine_wants_branches(self, branches, include_tags=False):
+        def determine_wants(refs):
+            ret = []
+            for name, value in refs.items():
+                if value == ZERO_SHA:
+                    continue
+
+                if name.endswith(ANNOTATED_TAG_SUFFIX):
+                    continue
+
+                if name in branches or (include_tags and is_tag(name)):
+                    ret.append(value)
+            return ret
+        return determine_wants
 
     def determine_wants_all(self, refs):
         potential = set([
@@ -726,14 +736,11 @@
         if limit is not None:
             raise FetchLimitUnsupported(self)
         from .remote import DefaultProgressReporter
-        pb = ui.ui_factory.nested_progress_bar()
-        progress = DefaultProgressReporter(pb).progress
-        try:
+        with ui.ui_factory.nested_progress_bar() as pb:
+            progress = DefaultProgressReporter(pb).progress
             refs = self.source._git.fetch(
                 self.target._git, determine_wants,
                 progress=progress)
-        finally:
-            pb.finished()
         return (None, None, refs)
 
     @staticmethod
@@ -781,3 +788,36 @@
         """Be compatible with GitRepository."""
         return (isinstance(source, RemoteGitRepository) and
                 isinstance(target, LocalGitRepository))
+
+
+
+class InterLocalGitRemoteGitRepository(InterToGitRepository):
+
+    def fetch_refs(self, update_refs, lossy=False, overwrite=False):
+        """Import the gist of the ancestry of a particular revision."""
+        if lossy:
+            raise LossyPushToSameVCS(self.source, self.target)
+
+        def git_update_refs(old_refs):
+            ret = {}
+            self.old_refs = {
+                k: (v, None) for (k, v) in viewitems(old_refs)}
+            new_refs = update_refs(self.old_refs)
+            for name, (gitid, revid) in viewitems(new_refs):
+                if gitid is None:
+                    gitid = self.source_store._lookup_revision_sha1(revid)
+                if not overwrite:
+                    if remote_divergence(
+                            old_refs.get(name), gitid, self.source_store):
+                        raise DivergedBranches(self.source, self.target)
+                ret[name] = gitid
+            return ret
+        new_refs = self.target.send_pack(
+            git_update_refs,
+            self.source._git.generate_pack_data)
+        return None, self.old_refs, new_refs
+
+    @staticmethod
+    def is_compatible(source, target):
+        return (isinstance(source, LocalGitRepository) and
+                isinstance(target, RemoteGitRepository))