/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/git/object_store.py

  • Committer: Jelmer Vernooij
  • Date: 2020-03-22 20:02:36 UTC
  • mto: (7490.7.7 work)
  • mto: This revision was merged to the branch mainline in revision 7501.
  • Revision ID: jelmer@jelmer.uk-20200322200236-fsbl91ktcn6fcbdd
Fix tests.

=== modified file 'breezy/git/object_store.py'
--- breezy/git/object_store.py
+++ breezy/git/object_store.py
@@ -17,6 +17,8 @@
 
 """Map from Git sha's to Bazaar objects."""
 
+from __future__ import absolute_import
+
 from dulwich.objects import (
     Blob,
     Commit,
@@ -44,6 +46,7 @@
 from ..revision import (
     NULL_REVISION,
     )
+from ..sixish import viewitems
 from ..tree import InterTree
 from ..bzr.testament import (
     StrictTestament3,
@@ -54,7 +57,6 @@
     )
 from .mapping import (
     default_mapping,
-    encode_git_path,
     entry_mode,
     extract_unusual_modes,
     mapping_registry,
@@ -133,7 +135,7 @@
         self._cache[tree.get_revision_id()] = tree
 
 
-def _find_missing_bzr_revids(graph, want, have, shallow=None):
+def _find_missing_bzr_revids(graph, want, have):
     """Find the revisions that have to be pushed.
 
     :param get_parent_map: Function that returns the parents for a sequence
@@ -143,17 +145,13 @@
     :return: Set of revisions to fetch
     """
     handled = set(have)
-    if shallow:
-        # Shallows themselves still need to be fetched, but let's exclude their
-        # parents.
-        for ps in graph.get_parent_map(shallow).values():
-            handled.update(ps)
-    handled.add(NULL_REVISION)
     todo = set()
     for rev in want:
         extra_todo = graph.find_unique_ancestors(rev, handled)
         todo.update(extra_todo)
         handled.update(extra_todo)
+    if NULL_REVISION in todo:
+        todo.remove(NULL_REVISION)
     return todo
 
 
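The two variants of _find_missing_bzr_revids above differ in how they keep NULL_REVISION and shallow history out of the result: one seeds `handled` with the parents of any shallow revisions (the shallows themselves are still fetched, their ancestry is not) and with NULL_REVISION up front, the other takes no shallow argument and simply discards NULL_REVISION from `todo` afterwards. A minimal standalone sketch of the shallow-exclusion idea, using a plain dict as the parent map instead of breezy's Graph object (all names here are illustrative, not breezy API):

    NULL_REVISION = b"null:"

    def missing_revids(parent_map, want, have, shallow=()):
        # Walk back from 'want' until anything already 'handled' is reached.
        # Shallow revisions are still sent, but their parents count as already
        # present, so history stops there.
        handled = set(have)
        for rev in shallow:
            handled.update(parent_map.get(rev, ()))
        handled.add(NULL_REVISION)
        todo, queue = set(), list(want)
        while queue:
            rev = queue.pop()
            if rev in handled or rev in todo:
                continue
            todo.add(rev)
            queue.extend(parent_map.get(rev, ()))
        return todo

    parents = {b"c": [b"b"], b"b": [b"a"], b"a": [b"base"], b"base": [NULL_REVISION]}
    # b"a" is shallow: it is fetched itself, but b"base" behind it is not.
    assert missing_revids(parents, {b"c"}, set(), shallow={b"a"}) == {b"c", b"b", b"a"}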
@@ -200,7 +198,7 @@
             mode = entry_mode(value)
         hexsha = lookup_ie_sha1(child_path, value)
         if hexsha is not None:
-            tree.add(encode_git_path(value.name), mode, hexsha)
+            tree.add(value.name.encode("utf-8"), mode, hexsha)
     if not allow_empty and len(tree) == 0:
         # Only the root can be an empty tree
         if empty_file_name is not None:
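A side note on the path handling in this hunk: one variant funnels the entry name through the encode_git_path helper imported from .mapping, the other encodes it directly as UTF-8. A helper like that presumably exists to centralise how unicode names become the raw bytes Git stores in tree objects; the sketch below is an assumption about such a helper's behaviour (using surrogateescape so undecodable names round-trip), not the actual breezy implementation:

    def encode_git_path_sketch(path):
        # Git tree entry names are raw bytes; surrogateescape lets names that
        # were not valid UTF-8 on disk survive a trip through a Python str.
        return path.encode("utf-8", "surrogateescape")

    assert encode_git_path_sketch("docs/réadme.txt") == b"docs/r\xc3\xa9adme.txt"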
@@ -309,7 +307,7 @@
     for (path, file_id), chunks in tree.iter_files_bytes(
             [(path, (path, file_id)) for (path, file_id) in new_blobs]):
         obj = Blob()
-        obj.chunked = list(chunks)
+        obj.chunked = chunks
         if add_cache_entry is not None:
             add_cache_entry(obj, (file_id, tree.get_file_revision(path)), path)
         yield path, obj, (file_id, tree.get_file_revision(path))
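Both variants of the blob-building loop hand the file content to a dulwich Blob via its chunked attribute; the only difference is whether the chunk iterable is materialised with list() first, which matters if the chunks come from a one-shot iterator and might be read more than once. Blob.chunked holds the content as a list of byte strings, and dulwich derives the flat data and the object id from it:

    from dulwich.objects import Blob

    blob = Blob()
    blob.chunked = [b"hello ", b"world\n"]   # content as a list of byte chunks
    assert blob.data == b"hello world\n"     # dulwich joins the chunks on access
    print(blob.id)                           # hex SHA-1 of the serialised blob object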
@@ -402,7 +400,7 @@
 
     def __iter__(self):
        return ((self.store[object_id], path) for (object_id, path) in
-                self.objects.items())
+                viewitems(self.objects))
 
 
 class BazaarObjectStore(BaseObjectStore):
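viewitems comes from breezy's sixish module, a small Python 2/3 compatibility layer; on Python 3 it amounts to dict.items(). Roughly what such a shim looks like (an approximation for illustration, not the actual breezy.sixish source):

    import sys

    if sys.version_info[0] >= 3:
        def viewitems(d):
            return d.items()       # Python 3: items() is already a view
    else:
        def viewitems(d):
            return d.viewitems()   # Python 2: avoid building an intermediate list

    # Usage mirrors the generator expression in __iter__ above:
    objects = {b"0" * 40: "some/path", b"1" * 40: "other/path"}
    pairs = list((object_id, path) for (object_id, path) in viewitems(objects))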
@@ -570,7 +568,7 @@
             ((key[0], key[1], key) for key in keys))
         for (file_id, revision, expected_sha), chunks in stream:
             blob = Blob()
-            blob.chunked = list(chunks)
+            blob.chunked = chunks
             if blob.id != expected_sha and blob.data == b"":
                 # Perhaps it's a symlink ?
                 tree = self.tree_cache.revision_tree(revision)
@@ -760,15 +758,13 @@
         else:
             raise KeyError(sha)
 
-    def generate_lossy_pack_data(self, have, want, shallow=None,
-                                 progress=None,
+    def generate_lossy_pack_data(self, have, want, progress=None,
                                  get_tagged=None, ofs_delta=False):
         return pack_objects_to_data(
-            self.generate_pack_contents(have, want, progress=progress,
-                                        shallow=shallow, get_tagged=get_tagged,
+            self.generate_pack_contents(have, want, progress, get_tagged,
                                         lossy=True))
 
-    def generate_pack_contents(self, have, want, shallow=None, progress=None,
+    def generate_pack_contents(self, have, want, progress=None,
                                ofs_delta=False, get_tagged=None, lossy=False):
         """Iterate over the contents of a pack file.
 
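One detail in the generate_lossy_pack_data hunk is how the optional arguments are forwarded to generate_pack_contents: one variant passes them by keyword, the other positionally. Against a signature like the one shown here (have, want, progress=None, ofs_delta=False, get_tagged=None, lossy=False), positional forwarding binds each value to whichever parameter happens to occupy that slot, so keyword forwarding is the safer pattern. A small illustration with hypothetical names, not breezy code:

    def generate(have, want, progress=None, ofs_delta=False, get_tagged=None):
        return {"progress": progress, "ofs_delta": ofs_delta, "get_tagged": get_tagged}

    def get_tagged():
        return {}

    # Positionally, the callable meant for get_tagged lands in the ofs_delta slot.
    assert generate([], [], None, get_tagged)["get_tagged"] is None
    assert generate([], [], None, get_tagged)["ofs_delta"] is get_tagged

    # By keyword, each value is bound by name, independent of parameter order.
    assert generate([], [], progress=None, get_tagged=get_tagged)["get_tagged"] is get_tagged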
@@ -797,18 +793,9 @@
                     pending.add(type_data[0])
             except KeyError:
                 pass
-        shallows = set()
-        for commit_sha in shallow or set():
-            try:
-                for (type, type_data) in ret[commit_sha]:
-                    if type != "commit":
-                        raise AssertionError("Type was %s, not commit" % type)
-                    shallows.add(type_data[0])
-            except KeyError:
-                pass
 
         graph = self.repository.get_graph()
-        todo = _find_missing_bzr_revids(graph, pending, processed, shallow)
+        todo = _find_missing_bzr_revids(graph, pending, processed)
         ret = PackTupleIterable(self)
         with ui.ui_factory.nested_progress_bar() as pb:
             for i, revid in enumerate(graph.iter_topo_order(todo)):