/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/git/object_store.py

  • Committer: Breezy landing bot
  • Author(s): Jelmer Vernooij
  • Date: 2020-07-28 02:47:10 UTC
  • mfrom: (7519.1.1 merge-3.1)
  • Revision ID: breezy.the.bot@gmail.com-20200728024710-a2ylds219f1lsl62
Merge lp:brz/3.1.

Merged from https://code.launchpad.net/~jelmer/brz/merge-3.1/+merge/388173

Show diffs side-by-side

added

removed

Lines of Context:
17
17
 
18
18
"""Map from Git sha's to Bazaar objects."""
19
19
 
20
 
from __future__ import absolute_import
21
 
 
22
20
from dulwich.objects import (
23
21
    Blob,
24
22
    Commit,
46
44
from ..revision import (
47
45
    NULL_REVISION,
48
46
    )
49
 
from ..sixish import viewitems
50
47
from ..tree import InterTree
51
48
from ..bzr.testament import (
52
49
    StrictTestament3,
57
54
    )
58
55
from .mapping import (
59
56
    default_mapping,
 
57
    encode_git_path,
60
58
    entry_mode,
61
59
    extract_unusual_modes,
62
60
    mapping_registry,
135
133
        self._cache[tree.get_revision_id()] = tree
136
134
 
137
135
 
138
 
def _find_missing_bzr_revids(graph, want, have):
 
136
def _find_missing_bzr_revids(graph, want, have, shallow=None):
139
137
    """Find the revisions that have to be pushed.
140
138
 
141
139
    :param get_parent_map: Function that returns the parents for a sequence
145
143
    :return: Set of revisions to fetch
146
144
    """
147
145
    handled = set(have)
 
146
    if shallow:
 
147
        # Shallows themselves still need to be fetched, but let's exclude their
 
148
        # parents.
 
149
        for ps in graph.get_parent_map(shallow).values():
 
150
            handled.update(ps)
 
151
    handled.add(NULL_REVISION)
148
152
    todo = set()
149
153
    for rev in want:
150
154
        extra_todo = graph.find_unique_ancestors(rev, handled)
151
155
        todo.update(extra_todo)
152
156
        handled.update(extra_todo)
153
 
    if NULL_REVISION in todo:
154
 
        todo.remove(NULL_REVISION)
155
157
    return todo
156
158
 
157
159
 
198
200
            mode = entry_mode(value)
199
201
        hexsha = lookup_ie_sha1(child_path, value)
200
202
        if hexsha is not None:
201
 
            tree.add(value.name.encode("utf-8"), mode, hexsha)
 
203
            tree.add(encode_git_path(value.name), mode, hexsha)
202
204
    if not allow_empty and len(tree) == 0:
203
205
        # Only the root can be an empty tree
204
206
        if empty_file_name is not None:
307
309
    for (path, file_id), chunks in tree.iter_files_bytes(
308
310
            [(path, (path, file_id)) for (path, file_id) in new_blobs]):
309
311
        obj = Blob()
310
 
        obj.chunked = chunks
 
312
        obj.chunked = list(chunks)
311
313
        if add_cache_entry is not None:
312
314
            add_cache_entry(obj, (file_id, tree.get_file_revision(path)), path)
313
315
        yield path, obj, (file_id, tree.get_file_revision(path))
400
402
 
401
403
    def __iter__(self):
402
404
        return ((self.store[object_id], path) for (object_id, path) in
403
 
                viewitems(self.objects))
 
405
                self.objects.items())
404
406
 
405
407
 
406
408
class BazaarObjectStore(BaseObjectStore):
568
570
            ((key[0], key[1], key) for key in keys))
569
571
        for (file_id, revision, expected_sha), chunks in stream:
570
572
            blob = Blob()
571
 
            blob.chunked = chunks
 
573
            blob.chunked = list(chunks)
572
574
            if blob.id != expected_sha and blob.data == b"":
573
575
                # Perhaps it's a symlink ?
574
576
                tree = self.tree_cache.revision_tree(revision)
758
760
        else:
759
761
            raise KeyError(sha)
760
762
 
761
 
    def generate_lossy_pack_data(self, have, want, progress=None,
 
763
    def generate_lossy_pack_data(self, have, want, shallow=None,
 
764
                                 progress=None,
762
765
                                 get_tagged=None, ofs_delta=False):
763
766
        return pack_objects_to_data(
764
 
            self.generate_pack_contents(have, want, progress, get_tagged,
 
767
            self.generate_pack_contents(have, want, progress=progress,
 
768
                                        shallow=shallow, get_tagged=get_tagged,
765
769
                                        lossy=True))
766
770
 
767
 
    def generate_pack_contents(self, have, want, progress=None,
 
771
    def generate_pack_contents(self, have, want, shallow=None, progress=None,
768
772
                               ofs_delta=False, get_tagged=None, lossy=False):
769
773
        """Iterate over the contents of a pack file.
770
774
 
793
797
                    pending.add(type_data[0])
794
798
            except KeyError:
795
799
                pass
 
800
        shallows = set()
 
801
        for commit_sha in shallow or set():
 
802
            try:
 
803
                for (type, type_data) in ret[commit_sha]:
 
804
                    if type != "commit":
 
805
                        raise AssertionError("Type was %s, not commit" % type)
 
806
                    shallows.add(type_data[0])
 
807
            except KeyError:
 
808
                pass
796
809
 
797
810
        graph = self.repository.get_graph()
798
 
        todo = _find_missing_bzr_revids(graph, pending, processed)
 
811
        todo = _find_missing_bzr_revids(graph, pending, processed, shallow)
799
812
        ret = PackTupleIterable(self)
800
813
        with ui.ui_factory.nested_progress_bar() as pb:
801
814
            for i, revid in enumerate(graph.iter_topo_order(todo)):