# Copyright (C) 2009-2018 Jelmer Vernooij <jelmer@jelmer.uk>
# Copyright (C) 2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Map from Git sha's to Bazaar objects."""

from __future__ import absolute_import

import os
import posixpath
import stat
import tempfile

from dulwich.objects import (
    Blob,
    Commit,
    Tree,
    sha_to_hex,
    ZERO_SHA,
    )
from dulwich.object_store import (
    BaseObjectStore,
    )
from dulwich.pack import (
    Pack,
    PackData,
    pack_objects_to_data,
    )

from .. import (
    errors,
    lru_cache,
    osutils,
    trace,
    ui,
    )
from ..lock import LogicalLockResult
from ..revision import (
    NULL_REVISION,
    )
from ..sixish import viewitems
from ..tree import InterTree
from ..bzr.testament import (
    StrictTestament3,
    )

from .cache import (
    from_repository as cache_from_repository,
    )
from .mapping import (
    default_mapping,
    entry_mode,
    extract_unusual_modes,
    mapping_registry,
    symlink_to_blob,
    )
from .unpeel_map import (
    UnpeelMap,
    )


BANNED_FILENAMES = ['.git']


def get_object_store(repo, mapping=None):
    git = getattr(repo, "_git", None)
    if git is not None:
        git.object_store.unlock = lambda: None
        git.object_store.lock_read = lambda: LogicalLockResult(lambda: None)
        git.object_store.lock_write = lambda: LogicalLockResult(lambda: None)
        return git.object_store
    return BazaarObjectStore(repo, mapping)
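
# Rough usage sketch (illustrative only; `repo` stands for any Bazaar or Git
# repository object): native Git repositories hand back their dulwich object
# store with no-op locks, everything else gets wrapped in a BazaarObjectStore.
#
#     store = get_object_store(repo)
#     store.lock_read()
#     try:
#         for sha in store:
#             pass  # hex SHA1s of all mapped objects
#     finally:
#         store.unlock()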


MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024


class LRUTreeCache(object):

    def __init__(self, repository):
        def approx_tree_size(tree):
            # Very rough estimate, 250 bytes per inventory entry
            try:
                inv = tree.root_inventory
            except AttributeError:
                return 250
            return len(inv) * 250
        self.repository = repository
        self._cache = lru_cache.LRUSizeCache(
            max_size=MAX_TREE_CACHE_SIZE, after_cleanup_size=None,
            compute_size=approx_tree_size)

    def revision_tree(self, revid):
        try:
            tree = self._cache[revid]
        except KeyError:
            tree = self.repository.revision_tree(revid)
            self.add(tree)
        return tree

    def iter_revision_trees(self, revids):
        trees = {}
        todo = []
        for revid in revids:
            try:
                tree = self._cache[revid]
            except KeyError:
                todo.append(revid)
            else:
                if tree.get_revision_id() != revid:
                    raise AssertionError(
                        "revision id did not match: %s != %s" % (
                            tree.get_revision_id(), revid))
                trees[revid] = tree
        for tree in self.repository.revision_trees(todo):
            trees[tree.get_revision_id()] = tree
            self.add(tree)
        return (trees[r] for r in revids)

    def revision_trees(self, revids):
        return list(self.iter_revision_trees(revids))

    def add(self, tree):
        self._cache[tree.get_revision_id()] = tree
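
# Quick illustration (hypothetical names): the object store below keeps one of
# these as its tree_cache, but it can also be used directly, e.g.:
#
#     cache = LRUTreeCache(repo)
#     tree = cache.revision_tree(revid)   # fetched from the repository once
#     same = cache.revision_tree(revid)   # then served from the ~50MB LRU cache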


def _find_missing_bzr_revids(graph, want, have):
    """Find the revisions that have to be pushed.

    :param graph: Repository graph used to find ancestors
    :param want: Revisions the target wants
    :param have: Revisions the target already has
    :return: Set of revisions to fetch
    """
    handled = set(have)
    todo = set()
    for rev in want:
        extra_todo = graph.find_unique_ancestors(rev, handled)
        todo.update(extra_todo)
        handled.update(extra_todo)
    if NULL_REVISION in todo:
        todo.remove(NULL_REVISION)
    return todo


def _check_expected_sha(expected_sha, object):
    """Check whether an object matches an expected SHA.

    :param expected_sha: None or expected SHA as either binary or as hex digest
    :param object: Object to verify
    """
    if expected_sha is None:
        return
    if len(expected_sha) == 40:
        if expected_sha != object.sha().hexdigest().encode('ascii'):
            raise AssertionError("Invalid sha for %r: %s" % (object,
                                                             expected_sha))
    elif len(expected_sha) == 20:
        if expected_sha != object.sha().digest():
            raise AssertionError("Invalid sha for %r: %s" % (
                object, sha_to_hex(expected_sha)))
    else:
        raise AssertionError("Unknown length %d for %r" % (len(expected_sha),
                                                           expected_sha))


def directory_to_tree(path, children, lookup_ie_sha1, unusual_modes,
                      empty_file_name, allow_empty=False):
    """Create a Git Tree object from a Bazaar directory.

    :param path: directory path
    :param children: Children inventory entries
    :param lookup_ie_sha1: Lookup the Git SHA1 for an inventory entry
    :param unusual_modes: Dictionary with unusual file modes, keyed by path
    :param empty_file_name: Name to use for dummy files in empty directories,
        None to ignore empty directories.
    :return: Git Tree object, or None for an ignored empty directory
    """
    tree = Tree()
    for value in children:
        if value.name in BANNED_FILENAMES:
            continue
        child_path = osutils.pathjoin(path, value.name)
        try:
            mode = unusual_modes[child_path]
        except KeyError:
            mode = entry_mode(value)
        hexsha = lookup_ie_sha1(child_path, value)
        if hexsha is not None:
            tree.add(value.name.encode("utf-8"), mode, hexsha)
    if not allow_empty and len(tree) == 0:
        # Only the root can be an empty tree
        if empty_file_name is not None:
            tree.add(empty_file_name, stat.S_IFREG | 0o644, Blob().id)
        else:
            return None
    return tree
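
# Minimal sketch (assumed inputs: `entries` are inventory entries for one
# directory, `lookup_sha1` resolves a child to its Git SHA1): a Tree is built
# child by child; an empty non-root directory yields None unless a dummy file
# name is supplied.
#
#     git_tree = directory_to_tree('docs', entries, lookup_sha1, {}, None)
#     if git_tree is not None:
#         print(git_tree.id)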


def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
                     dummy_file_name=None, add_cache_entry=None):
    """Iterate over the objects that were introduced in a revision.

    :param tree: Revision tree to extract objects from
    :param parent_trees: Parent revision trees
    :param idmap: id map for looking up previously converted SHA1s
    :param unusual_modes: Unusual file modes dictionary
    :param dummy_file_name: File name to use for dummy files
        in empty directories. None to skip empty directories
    :return: Yields (path, object, ie) entries
    """
    dirty_dirs = set()
    new_blobs = []
    shamap = {}
    try:
        base_tree = parent_trees[0]
        other_parent_trees = parent_trees[1:]
    except IndexError:
        base_tree = tree._repository.revision_tree(NULL_REVISION)
        other_parent_trees = []

    def find_unchanged_parent_ie(path, kind, other, parent_trees):
        for ptree in parent_trees:
            intertree = InterTree.get(ptree, tree)
            ppath = intertree.find_source_path(path)
            if ppath is not None:
                pkind = ptree.kind(ppath)
                if kind == "file":
                    if (pkind == "file" and
                            ptree.get_file_sha1(ppath) == other):
                        return (
                            ptree.path2id(ppath), ptree.get_file_revision(ppath))
                if kind == "symlink":
                    if (pkind == "symlink" and
                            ptree.get_symlink_target(ppath) == other):
                        return (
                            ptree.path2id(ppath), ptree.get_file_revision(ppath))
        raise KeyError

    # Find all the changed blobs
    for change in tree.iter_changes(base_tree):
        if change.name[1] in BANNED_FILENAMES:
            continue
        if change.kind[1] == "file":
            sha1 = tree.get_file_sha1(change.path[1])
            blob_id = None
            try:
                (pfile_id, prevision) = find_unchanged_parent_ie(
                    change.path[1], change.kind[1], sha1, other_parent_trees)
            except KeyError:
                pass
            else:
                # It existed in one of the parents, with the same contents.
                # So no need to yield any new git objects.
                try:
                    blob_id = idmap.lookup_blob_id(
                        pfile_id, prevision)
                except KeyError:
                    if not change.changed_content:
                        # no-change merge ?
                        blob = Blob()
                        blob.data = tree.get_file_text(change.path[1])
                        blob_id = blob.id
            if blob_id is None:
                new_blobs.append((change.path[1], change.file_id))
            else:
                # TODO(jelmer): This code path does not have any test coverage.
                shamap[change.path[1]] = blob_id
                if add_cache_entry is not None:
                    add_cache_entry(
                        ("blob", blob_id),
                        (change.file_id, tree.get_file_revision(change.path[1])), change.path[1])
        elif change.kind[1] == "symlink":
            target = tree.get_symlink_target(change.path[1])
            blob = symlink_to_blob(target)
            shamap[change.path[1]] = blob.id
            if add_cache_entry is not None:
                add_cache_entry(
                    blob, (change.file_id, tree.get_file_revision(change.path[1])), change.path[1])
            try:
                find_unchanged_parent_ie(
                    change.path[1], change.kind[1], target, other_parent_trees)
            except KeyError:
                if change.changed_content:
                    yield (change.path[1], blob,
                           (change.file_id, tree.get_file_revision(change.path[1])))
        elif change.kind[1] is None:
            shamap[change.path[1]] = None
        elif change.kind[1] != 'directory':
            raise AssertionError(change.kind[1])
        for p in change.path:
            if p is None:
                continue
            dirty_dirs.add(osutils.dirname(p))

    # Fetch contents of the blobs that were changed
    for (path, file_id), chunks in tree.iter_files_bytes(
            [(path, (path, file_id)) for (path, file_id) in new_blobs]):
        obj = Blob()
        obj.chunked = chunks
        if add_cache_entry is not None:
            add_cache_entry(obj, (file_id, tree.get_file_revision(path)), path)
        yield path, obj, (file_id, tree.get_file_revision(path))
        shamap[path] = obj.id

    for path in unusual_modes:
        dirty_dirs.add(posixpath.dirname(path))

    for dir in list(dirty_dirs):
        for parent in osutils.parent_directories(dir):
            if parent in dirty_dirs:
                break
            dirty_dirs.add(parent)

    def ie_to_hexsha(path, ie):
        try:
            return shamap[path]
        except KeyError:
            pass
        # FIXME: Should be the same as in parent
        if ie.kind == "file":
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge ?
                blob = Blob()
                blob.data = tree.get_file_text(path)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "symlink":
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge ?
                target = tree.get_symlink_target(path)
                blob = symlink_to_blob(target)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "directory":
            # Not all cache backends store the tree information,
            # calculate again from scratch
            ret = directory_to_tree(
                path, ie.children.values(), ie_to_hexsha, unusual_modes,
                dummy_file_name, ie.parent_id is None)
            if ret is None:
                return ret
            return ret.id
        else:
            raise AssertionError

    for path in sorted(dirty_dirs, reverse=True):
        if not tree.has_filename(path):
            continue
        if tree.kind(path) != 'directory':
            continue
        obj = directory_to_tree(
            path, tree.iter_child_entries(path), ie_to_hexsha, unusual_modes,
            dummy_file_name, path == '')
        if obj is not None:
            file_id = tree.path2id(path)
            if add_cache_entry is not None:
                add_cache_entry(obj, (file_id, tree.get_revision_id()), path)
            yield path, obj, (file_id, tree.get_revision_id())
            shamap[path] = obj.id
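
# Note on ordering: _tree_to_objects first yields the changed blobs, then the
# dirty directories sorted deepest-first, so each Tree is only emitted once the
# SHA1s of its children are known (freshly computed or looked up in the idmap).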


class PackTupleIterable(object):
    """Wrapper around a dictionary with packability metadata."""

    def __init__(self, store):
        self.store = store
        self.store.lock_read()
        self.objects = {}

    def __del__(self):
        self.store.unlock()

    def add(self, sha, path):
        self.objects[sha] = path

    def __len__(self):
        return len(self.objects)

    def iterobjects(self):
        return ((self.store[object_id], path) for (object_id, path) in
                viewitems(self.objects))
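
# Sketch of how this is used further down (names as in generate_pack_contents):
# the iterable is filled with (sha, path) pairs up front, and the actual git
# objects are only dereferenced lazily when the pack writer iterates it:
#
#     ret = PackTupleIterable(self)
#     ret.add(obj.id, path)
#     # iterobjects() then yields (git_object, path) tuples on demand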


class BazaarObjectStore(BaseObjectStore):
    """A Git-style object store backed onto a Bazaar repository."""

    def __init__(self, repository, mapping=None):
        self.repository = repository
        self._map_updated = False
        self._locked = None
        if mapping is None:
            self.mapping = default_mapping
        else:
            self.mapping = mapping
        self._cache = cache_from_repository(repository)
        self._content_cache_types = ("tree",)
        self.start_write_group = self._cache.idmap.start_write_group
        self.abort_write_group = self._cache.idmap.abort_write_group
        self.commit_write_group = self._cache.idmap.commit_write_group
        self.tree_cache = LRUTreeCache(self.repository)
        self.unpeel_map = UnpeelMap.from_repository(self.repository)

    def _missing_revisions(self, revisions):
        return self._cache.idmap.missing_revisions(revisions)

    def _update_sha_map(self, stop_revision=None):
        if not self.is_locked():
            raise errors.LockNotHeld(self)
        if self._map_updated:
            return
        if (stop_revision is not None and
                not self._missing_revisions([stop_revision])):
            return
        graph = self.repository.get_graph()
        if stop_revision is None:
            all_revids = self.repository.all_revision_ids()
            missing_revids = self._missing_revisions(all_revids)
        else:
            heads = set([stop_revision])
            missing_revids = self._missing_revisions(heads)
            while heads:
                parents = graph.get_parent_map(heads)
                todo = set()
                for p in parents.values():
                    todo.update([x for x in p if x not in missing_revids])
                heads = self._missing_revisions(todo)
                missing_revids.update(heads)
        if NULL_REVISION in missing_revids:
            missing_revids.remove(NULL_REVISION)
        missing_revids = self.repository.has_revisions(missing_revids)
        if not missing_revids:
            if stop_revision is None:
                self._map_updated = True
            return
        self.start_write_group()
        try:
            with ui.ui_factory.nested_progress_bar() as pb:
                for i, revid in enumerate(graph.iter_topo_order(
                        missing_revids)):
                    trace.mutter('processing %r', revid)
                    pb.update("updating git map", i, len(missing_revids))
                    self._update_sha_map_revision(revid)
            if stop_revision is None:
                self._map_updated = True
        except BaseException:
            self.abort_write_group()
            raise
        else:
            self.commit_write_group()

    def __iter__(self):
        self._update_sha_map()
        return iter(self._cache.idmap.sha1s())

    def _reconstruct_commit(self, rev, tree_sha, lossy, verifiers):
        """Reconstruct a Commit object.

        :param rev: Revision object
        :param tree_sha: SHA1 of the root tree object
        :param lossy: Whether or not to roundtrip bzr metadata
        :param verifiers: Verifiers for the commit
        :return: Commit object
        """
        def parent_lookup(revid):
            try:
                return self._lookup_revision_sha1(revid)
            except errors.NoSuchRevision:
                return None
        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
                                          lossy, verifiers)

    def _revision_to_objects(self, rev, tree, lossy, add_cache_entry=None):
        """Convert a revision to a set of git objects.

        :param rev: Bazaar revision object
        :param tree: Bazaar revision tree
        :param lossy: Whether to drop revision metadata that cannot be
            represented in Git rather than roundtripping it
        """
        unusual_modes = extract_unusual_modes(rev)
        present_parents = self.repository.has_revisions(rev.parent_ids)
        parent_trees = self.tree_cache.revision_trees(
            [p for p in rev.parent_ids if p in present_parents])
        root_tree = None
        for path, obj, bzr_key_data in _tree_to_objects(
                tree, parent_trees, self._cache.idmap, unusual_modes,
                self.mapping.BZR_DUMMY_FILE, add_cache_entry):
            if path == "":
                root_tree = obj
                root_key_data = bzr_key_data
                # Don't yield just yet
            else:
                yield path, obj
        if root_tree is None:
            # Pointless commit - get the tree sha elsewhere
            if not rev.parent_ids:
                root_tree = Tree()
            else:
                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                root_tree = self[self[base_sha1].tree]
            root_key_data = (tree.path2id(''), tree.get_revision_id())
        if add_cache_entry is not None:
            add_cache_entry(root_tree, root_key_data, "")
        yield "", root_tree
        if not lossy:
            testament3 = StrictTestament3(rev, tree)
            verifiers = {"testament3-sha1": testament3.as_sha1()}
        else:
            verifiers = {}
        commit_obj = self._reconstruct_commit(rev, root_tree.id,
                                              lossy=lossy, verifiers=verifiers)
        try:
            foreign_revid, mapping = mapping_registry.parse_revision_id(
                rev.revision_id)
        except errors.InvalidRevisionId:
            pass
        else:
            _check_expected_sha(foreign_revid, commit_obj)
        if add_cache_entry is not None:
            add_cache_entry(commit_obj, verifiers, None)

        yield None, commit_obj

    def _get_updater(self, rev):
        return self._cache.get_updater(rev)

    def _update_sha_map_revision(self, revid):
        rev = self.repository.get_revision(revid)
        tree = self.tree_cache.revision_tree(rev.revision_id)
        updater = self._get_updater(rev)
        # FIXME JRV 2011-12-15: Shouldn't we try both values for lossy ?
        for path, obj in self._revision_to_objects(
                rev, tree, lossy=(not self.mapping.roundtripping),
                add_cache_entry=updater.add_object):
            if isinstance(obj, Commit):
                commit_obj = obj
        commit_obj = updater.finish()
        return commit_obj.id

    def _reconstruct_blobs(self, keys):
        """Return Git Blob objects from fileids and revisions stored in bzr.

        :param keys: Iterable over (fileid, revision, expected_sha) tuples
        """
        stream = self.repository.iter_files_bytes(
            ((key[0], key[1], key) for key in keys))
        for (file_id, revision, expected_sha), chunks in stream:
            blob = Blob()
            blob.chunked = chunks
            if blob.id != expected_sha and blob.data == b"":
                # Perhaps it's a symlink ?
                tree = self.tree_cache.revision_tree(revision)
                path = tree.id2path(file_id)
                if tree.kind(path) == 'symlink':
                    blob = symlink_to_blob(tree.get_symlink_target(path))
            _check_expected_sha(expected_sha, blob)
            yield blob

    def _reconstruct_tree(self, fileid, revid, bzr_tree, unusual_modes,
                          expected_sha=None):
        """Return a Git Tree object from a file id and a revision stored in bzr.

        :param fileid: fileid in the tree.
        :param revid: Revision of the tree.
        """
        def get_ie_sha1(path, entry):
            if entry.kind == "directory":
                try:
                    return self._cache.idmap.lookup_tree_id(entry.file_id,
                                                            revid)
                except (NotImplementedError, KeyError):
                    obj = self._reconstruct_tree(
                        entry.file_id, revid, bzr_tree, unusual_modes)
                    if obj is None:
                        return None
                    else:
                        return obj.id
            elif entry.kind in ("file", "symlink"):
                try:
                    return self._cache.idmap.lookup_blob_id(entry.file_id,
                                                            entry.revision)
                except KeyError:
                    return next(self._reconstruct_blobs(
                        [(entry.file_id, entry.revision, None)])).id
            elif entry.kind == 'tree-reference':
                # FIXME: Make sure the file id is the root id
                return self._lookup_revision_sha1(entry.reference_revision)
            else:
                raise AssertionError("unknown entry kind '%s'" % entry.kind)
        path = bzr_tree.id2path(fileid)
        tree = directory_to_tree(
            path,
            bzr_tree.iter_child_entries(path),
            get_ie_sha1, unusual_modes, self.mapping.BZR_DUMMY_FILE,
            bzr_tree.path2id('') == fileid)
        if tree is not None:
            _check_expected_sha(expected_sha, tree)
        return tree

    def get_parents(self, sha):
        """Retrieve the parents of a Git commit by SHA1.

        :param sha: SHA1 of the commit
        :raises: KeyError, NotCommitError
        """
        return self[sha].parents

    def _lookup_revision_sha1(self, revid):
        """Return the SHA1 matching a Bazaar revision."""
        if revid == NULL_REVISION:
            return ZERO_SHA
        try:
            return self._cache.idmap.lookup_commit(revid)
        except KeyError:
            try:
                return mapping_registry.parse_revision_id(revid)[0]
            except errors.InvalidRevisionId:
                self._update_sha_map(revid)
                return self._cache.idmap.lookup_commit(revid)
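
    # Rough illustration (hypothetical revision id): the lookup first tries the
    # idmap cache, then parsing of roundtripped/foreign revision ids, and
    # finally walks the repository via _update_sha_map().
    #
    #     store.lock_read()
    #     try:
    #         hexsha = store._lookup_revision_sha1(b'some-revid')
    #     finally:
    #         store.unlock()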

    def get_raw(self, sha):
        """Get the raw representation of a Git object by SHA1.

        :param sha: SHA1 of the git object
        """
        if len(sha) == 20:
            sha = sha_to_hex(sha)
        obj = self[sha]
        return (obj.type, obj.as_raw_string())

    def __contains__(self, sha):
        # See if sha is in map
        try:
            for (type, type_data) in self.lookup_git_sha(sha):
                if type == "commit":
                    if self.repository.has_revision(type_data[0]):
                        return True
                elif type == "blob":
                    if type_data in self.repository.texts:
                        return True
                elif type == "tree":
                    if self.repository.has_revision(type_data[1]):
                        return True
                else:
                    raise AssertionError("Unknown object type '%s'" % type)
        except KeyError:
            pass
        return False

    def lock_read(self):
        self._locked = 'r'
        self._map_updated = False
        self.repository.lock_read()
        return LogicalLockResult(self.unlock)

    def lock_write(self):
        self._locked = 'w'
        self._map_updated = False
        self.repository.lock_write()
        return LogicalLockResult(self.unlock)

    def is_locked(self):
        return (self._locked is not None)

    def unlock(self):
        self._locked = None
        self._map_updated = False
        self.repository.unlock()

    def lookup_git_shas(self, shas):
        ret = {}
        for sha in shas:
            if sha == ZERO_SHA:
                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
                continue
            try:
                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
            except KeyError:
                # if not, see if there are any unconverted revisions and
                # add them to the map, search for sha in map again
                self._update_sha_map()
                try:
                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
                except KeyError:
                    pass
        return ret

    def lookup_git_sha(self, sha):
        return self.lookup_git_shas([sha])[sha]

    def __getitem__(self, sha):
        for (kind, type_data) in self.lookup_git_sha(sha):
            # convert object to git object
            if kind == "commit":
                (revid, tree_sha, verifiers) = type_data
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    if revid == NULL_REVISION:
                        raise AssertionError(
                            "should not try to look up NULL_REVISION")
                    trace.mutter('entry for %s %s in shamap: %r, but not '
                                 'found in repository', kind, sha, type_data)
                    continue
                # FIXME: the type data should say whether conversion was
                # lossless
                commit = self._reconstruct_commit(
                    rev, tree_sha, lossy=(not self.mapping.roundtripping),
                    verifiers=verifiers)
                _check_expected_sha(sha, commit)
                return commit
            elif kind == "blob":
                (fileid, revision) = type_data
                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
                return next(blobs)
            elif kind == "tree":
                (fileid, revid) = type_data
                try:
                    tree = self.tree_cache.revision_tree(revid)
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    trace.mutter(
                        'entry for %s %s in shamap: %r, but not found in '
                        'repository', kind, sha, type_data)
                    continue
                unusual_modes = extract_unusual_modes(rev)
                try:
                    return self._reconstruct_tree(
                        fileid, revid, tree, unusual_modes, expected_sha=sha)
                except errors.NoSuchRevision:
                    continue
            else:
                raise AssertionError("Unknown object type '%s'" % kind)
        else:
            raise KeyError(sha)

    def generate_lossy_pack_data(self, have, want, progress=None,
                                 get_tagged=None, ofs_delta=False):
        return pack_objects_to_data(
            self.generate_pack_contents(have, want, progress, get_tagged,
                                        lossy=True))

    def generate_pack_contents(self, have, want, progress=None,
                               ofs_delta=False, get_tagged=None, lossy=False):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        """
        processed = set()
        ret = self.lookup_git_shas(have + want)
        for commit_sha in have:
            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    processed.add(type_data[0])
            except KeyError:
                trace.mutter("unable to find remote ref %s", commit_sha)
        pending = set()
        for commit_sha in want:
            if commit_sha in have:
                continue
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    pending.add(type_data[0])
            except KeyError:
                pass

        graph = self.repository.get_graph()
        todo = _find_missing_bzr_revids(graph, pending, processed)
        ret = PackTupleIterable(self)
        with ui.ui_factory.nested_progress_bar() as pb:
            for i, revid in enumerate(graph.iter_topo_order(todo)):
                pb.update("generating git objects", i, len(todo))
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    continue
                tree = self.tree_cache.revision_tree(revid)
                for path, obj in self._revision_to_objects(
                        rev, tree, lossy=lossy):
                    ret.add(obj.id, path)
        return ret
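
    # Rough usage sketch (hedged; `haves` and `wants` are hypothetical lists of
    # hex SHA1s sent by a Git client): the (object, path) tuples collected here
    # can be handed to dulwich's pack writing helpers, e.g.:
    #
    #     objects = store.generate_pack_contents(haves, wants, lossy=True)
    #     # then e.g. dulwich.pack.write_pack_objects(outfile, objects)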

    def add_thin_pack(self):
        fd, path = tempfile.mkstemp(suffix=".pack")
        f = os.fdopen(fd, 'wb')

        def commit():
            from .fetch import import_git_objects
            f.close()
            if os.path.getsize(path) == 0:
                return
            pd = PackData(path)
            pd.create_index_v2(path[:-5] + ".idx", self.object_store.get_raw)
            p = Pack(path[:-5])
            with self.repository.lock_write():
                self.repository.start_write_group()
                try:
                    import_git_objects(self.repository, self.mapping,
                                       p.iterobjects(get_raw=self.get_raw),
                                       self.object_store)
                except BaseException:
                    self.repository.abort_write_group()
                    raise
                else:
                    self.repository.commit_write_group()
        return f, commit

    # The pack isn't kept around anyway, so no point
    # in treating full packs differently from thin packs
    add_pack = add_thin_pack