/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/git/object_store.py

  • Committer: Jelmer Vernooij
  • Date: 2019-06-03 23:48:08 UTC
  • mfrom: (7316 work)
  • mto: This revision was merged to the branch mainline in revision 7328.
  • Revision ID: jelmer@jelmer.uk-20190603234808-15yk5c7054tj8e2b
Merge trunk.

=== modified file 'breezy/git/object_store.py'
--- breezy/git/object_store.py (old)
+++ breezy/git/object_store.py (new)
@@ -47,6 +47,7 @@
     NULL_REVISION,
     )
 from ..sixish import viewitems
+from ..tree import InterTree
 from ..bzr.testament import (
     StrictTestament3,
     )
@@ -228,37 +229,34 @@
         base_tree = tree._repository.revision_tree(NULL_REVISION)
         other_parent_trees = []
 
-    def find_unchanged_parent_ie(file_id, kind, other, parent_trees):
+    def find_unchanged_parent_ie(path, kind, other, parent_trees):
         for ptree in parent_trees:
-            try:
-                ppath = ptree.id2path(file_id)
-            except errors.NoSuchId:
-                pass
-            else:
+            intertree = InterTree.get(ptree, tree)
+            ppath = intertree.find_source_path(path)
+            if ppath is not None:
                 pkind = ptree.kind(ppath)
                 if kind == "file":
                     if (pkind == "file" and
                             ptree.get_file_sha1(ppath) == other):
                         return (
-                            file_id, ptree.get_file_revision(ppath))
+                            ptree.path2id(ppath), ptree.get_file_revision(ppath))
                 if kind == "symlink":
                     if (pkind == "symlink" and
                             ptree.get_symlink_target(ppath) == other):
                         return (
-                            file_id, ptree.get_file_revision(ppath))
+                            ptree.path2id(ppath), ptree.get_file_revision(ppath))
         raise KeyError
 
     # Find all the changed blobs
-    for (file_id, path, changed_content, versioned, parent, name, kind,
-         executable) in tree.iter_changes(base_tree):
-        if name[1] in BANNED_FILENAMES:
+    for change in tree.iter_changes(base_tree):
+        if change.name[1] in BANNED_FILENAMES:
             continue
-        if kind[1] == "file":
-            sha1 = tree.get_file_sha1(path[1])
+        if change.kind[1] == "file":
+            sha1 = tree.get_file_sha1(change.path[1])
             blob_id = None
             try:
                 (pfile_id, prevision) = find_unchanged_parent_ie(
-                    file_id, kind[1], sha1, other_parent_trees)
+                    change.path[1], change.kind[1], sha1, other_parent_trees)
             except KeyError:
                 pass
             else:
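The hunk above also rewrites the parent lookup inside find_unchanged_parent_ie. A minimal sketch of the two styles, using only calls that appear in the diff (the surrounding variables ptree, tree, path and file_id are placeholders):

# Old: map the file id back to a path in the parent tree, treating
# errors.NoSuchId as "not present in this parent".
try:
    ppath = ptree.id2path(file_id)
except errors.NoSuchId:
    ppath = None

# New: ask an InterTree for the corresponding path in the parent tree;
# find_source_path() returns None when there is no counterpart.
intertree = InterTree.get(ptree, tree)
ppath = intertree.find_source_path(path)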
@@ -268,38 +266,39 @@
                     blob_id = idmap.lookup_blob_id(
                         pfile_id, prevision)
                 except KeyError:
-                    if not changed_content:
+                    if not change.changed_content:
                         # no-change merge ?
                         blob = Blob()
-                        blob.data = tree.get_file_text(path[1])
+                        blob.data = tree.get_file_text(change.path[1])
                         blob_id = blob.id
             if blob_id is None:
-                new_blobs.append((path[1], file_id))
+                new_blobs.append((change.path[1], change.file_id))
             else:
-                shamap[path[1]] = blob_id
+                # TODO(jelmer): This code path does not have any test coverage.
+                shamap[change.path[1]] = blob_id
                 if add_cache_entry is not None:
                     add_cache_entry(
                         ("blob", blob_id),
-                        (file_id, tree.get_file_revision(path[1])), path[1])
-        elif kind[1] == "symlink":
-            target = tree.get_symlink_target(path[1])
+                        (change.file_id, tree.get_file_revision(change.path[1])), change.path[1])
+        elif change.kind[1] == "symlink":
+            target = tree.get_symlink_target(change.path[1])
             blob = symlink_to_blob(target)
-            shamap[path[1]] = blob.id
+            shamap[change.path[1]] = blob.id
             if add_cache_entry is not None:
                 add_cache_entry(
-                    blob, (file_id, tree.get_file_revision(path[1])), path[1])
+                    blob, (change.file_id, tree.get_file_revision(change.path[1])), change.path[1])
             try:
                 find_unchanged_parent_ie(
-                    file_id, kind[1], target, other_parent_trees)
+                    change.path[1], change.kind[1], target, other_parent_trees)
             except KeyError:
-                if changed_content:
-                    yield (path[1], blob,
-                           (file_id, tree.get_file_revision(path[1])))
-        elif kind[1] is None:
-            shamap[path[1]] = None
-        elif kind[1] != 'directory':
-            raise AssertionError(kind[1])
-        for p in path:
+                if change.changed_content:
+                    yield (change.path[1], blob,
+                           (change.file_id, tree.get_file_revision(change.path[1])))
+        elif change.kind[1] is None:
+            shamap[change.path[1]] = None
+        elif change.kind[1] != 'directory':
+            raise AssertionError(change.kind[1])
+        for p in change.path:
             if p is None:
                 continue
             dirty_dirs.add(osutils.dirname(p))
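The rest of the function tracks the same API shift in tree.iter_changes(): the old code unpacked a fixed-width tuple per change, while the new code reads named attributes off a change object, each of which still holds a (source, target) pair. A condensed sketch, drawn only from lines in the hunks above:

# Old: one tuple per change, unpacked positionally.
for (file_id, path, changed_content, versioned, parent, name, kind,
        executable) in tree.iter_changes(base_tree):
    if kind[1] == "file":
        sha1 = tree.get_file_sha1(path[1])

# New: one change object per change, with named attributes; index [1]
# still selects the target side of the (source, target) pair.
for change in tree.iter_changes(base_tree):
    if change.kind[1] == "file":
        sha1 = tree.get_file_sha1(change.path[1])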
@@ -457,15 +456,12 @@
             return
         self.start_write_group()
         try:
-            pb = ui.ui_factory.nested_progress_bar()
-            try:
+            with ui.ui_factory.nested_progress_bar() as pb:
                 for i, revid in enumerate(graph.iter_topo_order(
                         missing_revids)):
                     trace.mutter('processing %r', revid)
                     pb.update("updating git map", i, len(missing_revids))
                     self._update_sha_map_revision(revid)
-            finally:
-                pb.finished()
             if stop_revision is None:
                 self._map_updated = True
         except BaseException:
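This hunk is a mechanical cleanup: nested_progress_bar() is used as a context manager, which finishes the bar on exit and removes the explicit try/finally. A minimal sketch of the pattern, assuming breezy's ui module is importable (names other than ui_factory.nested_progress_bar and pb.update are illustrative):

from breezy import ui

def process(items):
    # The with-block finishes the progress bar even if the body raises,
    # replacing the pb = ...; try: ...; finally: pb.finished() pattern.
    with ui.ui_factory.nested_progress_bar() as pb:
        for i, item in enumerate(items):
            pb.update("processing", i, len(items))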
@@ -495,15 +491,6 @@
         return self.mapping.export_commit(rev, tree_sha, parent_lookup,
                                           lossy, verifiers)
 
-    def _create_fileid_map_blob(self, tree):
-        # FIXME: This can probably be a lot more efficient,
-        # not all files necessarily have to be processed.
-        file_ids = {}
-        for (path, ie) in tree.iter_entries_by_dir():
-            if self.mapping.generate_file_id(path) != ie.file_id:
-                file_ids[path] = ie.file_id
-        return self.mapping.export_fileid_map(file_ids)
-
     def _revision_to_objects(self, rev, tree, lossy, add_cache_entry=None):
         """Convert a revision to a set of git objects.
 
@@ -532,13 +519,7 @@
             else:
                 base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                 root_tree = self[self[base_sha1].tree]
-            root_key_data = (tree.get_root_id(), tree.get_revision_id())
-        if not lossy and self.mapping.BZR_FILE_IDS_FILE is not None:
-            b = self._create_fileid_map_blob(tree)
-            if b is not None:
-                root_tree[self.mapping.BZR_FILE_IDS_FILE] = (
-                    (stat.S_IFREG | 0o644), b.id)
-                yield self.mapping.BZR_FILE_IDS_FILE, b
+            root_key_data = (tree.path2id(''), tree.get_revision_id())
         if add_cache_entry is not None:
             add_cache_entry(root_tree, root_key_data, "")
         yield "", root_tree
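Besides dropping the Bazaar file-id map blob from the exported root tree, this hunk swaps the file-id-centric root lookup for a path-based one. Sketch only; both calls appear in the diff and return the same file id for the tree root:

root_id = tree.get_root_id()  # old helper, being phased out with the file-id APIs
root_id = tree.path2id('')    # new form: look up the file id of the root path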
@@ -634,15 +615,7 @@
             path,
             bzr_tree.iter_child_entries(path),
             get_ie_sha1, unusual_modes, self.mapping.BZR_DUMMY_FILE,
-            bzr_tree.get_root_id() == fileid)
-        if (bzr_tree.get_root_id() == fileid and
-                self.mapping.BZR_FILE_IDS_FILE is not None):
-            if tree is None:
-                tree = Tree()
-            b = self._create_fileid_map_blob(bzr_tree)
-            # If this is the root tree, add the file ids
-            tree[self.mapping.BZR_FILE_IDS_FILE] = (
-                (stat.S_IFREG | 0o644), b.id)
+            bzr_tree.path2id('') == fileid)
         if tree is not None:
             _check_expected_sha(expected_sha, tree)
         return tree
@@ -824,8 +797,7 @@
         graph = self.repository.get_graph()
         todo = _find_missing_bzr_revids(graph, pending, processed)
         ret = PackTupleIterable(self)
-        pb = ui.ui_factory.nested_progress_bar()
-        try:
+        with ui.ui_factory.nested_progress_bar() as pb:
             for i, revid in enumerate(graph.iter_topo_order(todo)):
                 pb.update("generating git objects", i, len(todo))
                 try:
@@ -837,8 +809,6 @@
                         rev, tree, lossy=lossy):
                     ret.add(obj.id, path)
             return ret
-        finally:
-            pb.finished()
 
     def add_thin_pack(self):
         import tempfile