# Copyright (C) 2009-2018 Jelmer Vernooij <jelmer@jelmer.uk>
# Copyright (C) 2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Map from Git SHAs to Bazaar objects."""

from dulwich.objects import (
    Blob,
    Commit,
    Tree,
    ZERO_SHA,
    sha_to_hex,
    )
from dulwich.object_store import (
    BaseObjectStore,
    )
from dulwich.pack import (
    Pack,
    PackData,
    pack_objects_to_data,
    )

import posixpath
import stat

from .. import (
    errors,
    lru_cache,
    osutils,
    trace,
    ui,
    )
from ..lock import LogicalLockResult
from ..revision import (
    NULL_REVISION,
    )
from ..tree import InterTree
from ..bzr.testament import (
    StrictTestament3,
    )

from .cache import (
    from_repository as cache_from_repository,
    )
from .mapping import (
    default_mapping,
    encode_git_path,
    entry_mode,
    extract_unusual_modes,
    mapping_registry,
    symlink_to_blob,
    )
from .unpeel_map import (
    UnpeelMap,
    )


BANNED_FILENAMES = ['.git']


def get_object_store(repo, mapping=None):
    git = getattr(repo, "_git", None)
    if git is not None:
        git.object_store.unlock = lambda: None
        git.object_store.lock_read = lambda: LogicalLockResult(lambda: None)
        git.object_store.lock_write = lambda: LogicalLockResult(lambda: None)
        return git.object_store
    return BazaarObjectStore(repo, mapping)
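
# Example usage (an illustrative sketch, not part of the API; assumes `repo`
# is an already-open breezy repository object):
#
#   store = get_object_store(repo)
#   with store.lock_read():
#       for sha in store:
#           ...  # hex SHA1s of the Git objects mapped so far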


MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024


class LRUTreeCache(object):
    """An LRU cache of recently used revision trees."""

    def __init__(self, repository):
        def approx_tree_size(tree):
            # Very rough estimate, 250 per inventory entry
            try:
                inv = tree.root_inventory
            except AttributeError:
                return 250
            return len(inv) * 250
        self.repository = repository
        self._cache = lru_cache.LRUSizeCache(
            max_size=MAX_TREE_CACHE_SIZE, after_cleanup_size=None,
            compute_size=approx_tree_size)

    def revision_tree(self, revid):
        try:
            tree = self._cache[revid]
        except KeyError:
            tree = self.repository.revision_tree(revid)
            self.add(tree)
        return tree

    def iter_revision_trees(self, revids):
        trees = {}
        todo = []
        for revid in revids:
            try:
                tree = self._cache[revid]
            except KeyError:
                todo.append(revid)
            else:
                if tree.get_revision_id() != revid:
                    raise AssertionError(
                        "revision id did not match: %s != %s" % (
                            tree.get_revision_id(), revid))
                trees[revid] = tree
        for tree in self.repository.revision_trees(todo):
            trees[tree.get_revision_id()] = tree
            self.add(tree)
        return (trees[r] for r in revids)

    def revision_trees(self, revids):
        return list(self.iter_revision_trees(revids))

    def add(self, tree):
        self._cache[tree.get_revision_id()] = tree
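
# Illustrative sketch (assumes `repo` is a breezy repository and `revid`
# a revision present in it):
#
#   cache = LRUTreeCache(repo)
#   tree = cache.revision_tree(revid)    # miss: read from the repository
#   tree = cache.revision_tree(revid)    # hit: served from the LRU cache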


def _find_missing_bzr_revids(graph, want, have, shallow=None):
    """Find the revisions that have to be pushed.

    :param graph: Repository graph to search for the revisions
    :param want: Revisions the target wants
    :param have: Revisions the target already has
    :param shallow: Revisions the target is shallow at
    :return: Set of revisions to fetch
    """
    handled = set(have)
    if shallow:
        # Shallows themselves still need to be fetched, but let's exclude
        # their parents.
        for ps in graph.get_parent_map(shallow).values():
            handled.update(ps)
    handled.add(NULL_REVISION)
    todo = set()
    for rev in want:
        extra_todo = graph.find_unique_ancestors(rev, handled)
        todo.update(extra_todo)
        handled.update(extra_todo)
    return todo


def _check_expected_sha(expected_sha, object):
    """Check whether an object matches an expected SHA.

    :param expected_sha: None or expected SHA as either binary or as hex digest
    :param object: Object to verify
    """
    if expected_sha is None:
        return
    if len(expected_sha) == 40:
        if expected_sha != object.sha().hexdigest().encode('ascii'):
            raise AssertionError("Invalid sha for %r: %s" % (
                object, expected_sha))
    elif len(expected_sha) == 20:
        if expected_sha != object.sha().digest():
            raise AssertionError("Invalid sha for %r: %s" % (
                object, sha_to_hex(expected_sha)))
    else:
        raise AssertionError("Unknown length %d for %r" % (
            len(expected_sha), expected_sha))
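
# Example (illustrative): both digest forms are accepted.
#
#   blob = Blob.from_string(b"contents")
#   _check_expected_sha(blob.id, blob)              # 40-byte hex digest
#   _check_expected_sha(blob.sha().digest(), blob)  # 20-byte binary digest
#   _check_expected_sha(None, blob)                 # None skips the check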


def directory_to_tree(path, children, lookup_ie_sha1, unusual_modes,
                      empty_file_name, allow_empty=False):
    """Create a Git Tree object from a Bazaar directory.

    :param path: directory path
    :param children: Children inventory entries
    :param lookup_ie_sha1: Lookup the Git SHA1 for an inventory entry
    :param unusual_modes: Dictionary with unusual file modes, keyed by path
    :param empty_file_name: Name to use for dummy files in empty directories,
        None to ignore empty directories.
    :param allow_empty: Whether an empty tree is acceptable here
    :return: Git Tree object, or None for an ignored empty directory
    """
    tree = Tree()
    for value in children:
        if value.name in BANNED_FILENAMES:
            continue
        child_path = osutils.pathjoin(path, value.name)
        try:
            mode = unusual_modes[child_path]
        except KeyError:
            mode = entry_mode(value)
        hexsha = lookup_ie_sha1(child_path, value)
        if hexsha is not None:
            tree.add(encode_git_path(value.name), mode, hexsha)
    if not allow_empty and len(tree) == 0:
        # Only the root can be an empty tree
        if empty_file_name is not None:
            tree.add(empty_file_name, stat.S_IFREG | 0o644, Blob().id)
        else:
            return None
    return tree
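
# Illustrative sketch (the entry type and lookup callback below are
# hypothetical stand-ins; real callers pass breezy inventory entries and an
# idmap-backed lookup):
#
#   Entry = collections.namedtuple('Entry', ['name', 'kind', 'executable'])
#   def lookup(child_path, entry):
#       return Blob.from_string(b"data").id
#   git_tree = directory_to_tree(
#       'dir', [Entry('a', 'file', False)], lookup, {}, None)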


def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
                     dummy_file_name=None, add_cache_entry=None):
    """Iterate over the objects that were introduced in a revision.

    :param tree: Revision tree to export
    :param parent_trees: Parent revision trees
    :param idmap: id map
    :param unusual_modes: Unusual file modes dictionary
    :param dummy_file_name: File name to use for dummy files
        in empty directories. None to skip empty directories
    :return: Yields (path, object, (file_id, revision)) entries
    """
    dirty_dirs = set()
    new_blobs = []
    shamap = {}
    try:
        base_tree = parent_trees[0]
        other_parent_trees = parent_trees[1:]
    except IndexError:
        base_tree = tree._repository.revision_tree(NULL_REVISION)
        other_parent_trees = []

    def find_unchanged_parent_ie(path, kind, other, parent_trees):
        for ptree in parent_trees:
            intertree = InterTree.get(ptree, tree)
            ppath = intertree.find_source_path(path)
            if ppath is not None:
                pkind = ptree.kind(ppath)
                if kind == "file":
                    if (pkind == "file" and
                            ptree.get_file_sha1(ppath) == other):
                        return (
                            ptree.path2id(ppath),
                            ptree.get_file_revision(ppath))
                if kind == "symlink":
                    if (pkind == "symlink" and
                            ptree.get_symlink_target(ppath) == other):
                        return (
                            ptree.path2id(ppath),
                            ptree.get_file_revision(ppath))
        raise KeyError

    # Find all the changed blobs
    for change in tree.iter_changes(base_tree):
        if change.name[1] in BANNED_FILENAMES:
            continue
        if change.kind[1] == "file":
            sha1 = tree.get_file_sha1(change.path[1])
            blob_id = None
            try:
                (pfile_id, prevision) = find_unchanged_parent_ie(
                    change.path[1], change.kind[1], sha1, other_parent_trees)
            except KeyError:
                pass
            else:
                # It existed in one of the parents, with the same contents.
                # So no need to yield any new git objects.
                try:
                    blob_id = idmap.lookup_blob_id(pfile_id, prevision)
                except KeyError:
                    if not change.changed_content:
                        # no-change merge?
                        blob = Blob()
                        blob.data = tree.get_file_text(change.path[1])
                        blob_id = blob.id
            if blob_id is None:
                new_blobs.append((change.path[1], change.file_id))
            else:
                # TODO(jelmer): This code path does not have any test coverage.
                shamap[change.path[1]] = blob_id
                if add_cache_entry is not None:
                    add_cache_entry(
                        ("blob", blob_id),
                        (change.file_id,
                         tree.get_file_revision(change.path[1])),
                        change.path[1])
        elif change.kind[1] == "symlink":
            target = tree.get_symlink_target(change.path[1])
            blob = symlink_to_blob(target)
            shamap[change.path[1]] = blob.id
            if add_cache_entry is not None:
                add_cache_entry(
                    blob,
                    (change.file_id,
                     tree.get_file_revision(change.path[1])),
                    change.path[1])
            try:
                find_unchanged_parent_ie(
                    change.path[1], change.kind[1], target,
                    other_parent_trees)
            except KeyError:
                if change.changed_content:
                    yield (change.path[1], blob,
                           (change.file_id,
                            tree.get_file_revision(change.path[1])))
        elif change.kind[1] is None:
            shamap[change.path[1]] = None
        elif change.kind[1] != 'directory':
            raise AssertionError(change.kind[1])
        for p in change.path:
            if p is None:
                continue
            dirty_dirs.add(osutils.dirname(p))

    # Fetch contents of the blobs that were changed
    for (path, file_id), chunks in tree.iter_files_bytes(
            [(path, (path, file_id)) for (path, file_id) in new_blobs]):
        obj = Blob()
        obj.chunked = list(chunks)
        if add_cache_entry is not None:
            add_cache_entry(obj, (file_id, tree.get_file_revision(path)), path)
        yield path, obj, (file_id, tree.get_file_revision(path))
        shamap[path] = obj.id

    for path in unusual_modes:
        dirty_dirs.add(posixpath.dirname(path))

    for dir in list(dirty_dirs):
        for parent in osutils.parent_directories(dir):
            if parent in dirty_dirs:
                break
            dirty_dirs.add(parent)

    if dirty_dirs:
        dirty_dirs.add('')

    def ie_to_hexsha(path, ie):
        try:
            return shamap[path]
        except KeyError:
            pass
        # FIXME: Should be the same as in parent
        if ie.kind == "file":
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge?
                blob = Blob()
                blob.data = tree.get_file_text(path)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "symlink":
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge?
                target = tree.get_symlink_target(path)
                blob = symlink_to_blob(target)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "directory":
            # Not all cache backends store the tree information,
            # calculate again from scratch
            ret = directory_to_tree(
                path, ie.children.values(), ie_to_hexsha, unusual_modes,
                dummy_file_name, ie.parent_id is None)
            if ret is None:
                return ret
            return ret.id
        else:
            raise AssertionError("unknown entry kind '%s'" % ie.kind)

    for path in sorted(dirty_dirs, reverse=True):
        if not tree.has_filename(path):
            continue
        if tree.kind(path) != 'directory':
            continue
        obj = directory_to_tree(
            path, tree.iter_child_entries(path), ie_to_hexsha, unusual_modes,
            dummy_file_name, path == '')
        if obj is not None:
            file_id = tree.path2id(path)
            if add_cache_entry is not None:
                add_cache_entry(obj, (file_id, tree.get_revision_id()), path)
            yield path, obj, (file_id, tree.get_revision_id())
            shamap[path] = obj.id


class PackTupleIterable(object):
    """Iterate over tuples with Git objects and their paths."""

    def __init__(self, store):
        self.store = store
        self.store.lock_read()
        self.objects = {}

    def __del__(self):
        self.store.unlock()

    def add(self, sha, path):
        self.objects[sha] = path

    def __len__(self):
        return len(self.objects)

    def __iter__(self):
        return ((self.store[object_id], path) for (object_id, path) in
                self.objects.items())
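
# Note that the iterable takes a read lock on the backing store when it is
# constructed and only releases it on garbage collection (__del__), so it
# should be exhausted promptly. Sketch:
#
#   pack = PackTupleIterable(store)
#   pack.add(sha, path)
#   entries = list(pack)   # (object, path) tuples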


class BazaarObjectStore(BaseObjectStore):
    """A Git-style object store backed onto a Bazaar repository."""

    def __init__(self, repository, mapping=None):
        self.repository = repository
        self._map_updated = False
        self._locked = None
        if mapping is None:
            self.mapping = default_mapping
        else:
            self.mapping = mapping
        self._cache = cache_from_repository(repository)
        self._content_cache_types = ("tree",)
        self.start_write_group = self._cache.idmap.start_write_group
        self.abort_write_group = self._cache.idmap.abort_write_group
        self.commit_write_group = self._cache.idmap.commit_write_group
        self.tree_cache = LRUTreeCache(self.repository)
        self.unpeel_map = UnpeelMap.from_repository(self.repository)

    def _missing_revisions(self, revisions):
        return self._cache.idmap.missing_revisions(revisions)

    def _update_sha_map(self, stop_revision=None):
        if not self.is_locked():
            raise errors.LockNotHeld(self)
        if self._map_updated:
            return
        if (stop_revision is not None and
                not self._missing_revisions([stop_revision])):
            return
        graph = self.repository.get_graph()
        if stop_revision is None:
            all_revids = self.repository.all_revision_ids()
            missing_revids = self._missing_revisions(all_revids)
        else:
            heads = set([stop_revision])
            missing_revids = self._missing_revisions(heads)
            while heads:
                parents = graph.get_parent_map(heads)
                todo = set()
                for p in parents.values():
                    todo.update([x for x in p if x not in missing_revids])
                heads = self._missing_revisions(todo)
                missing_revids.update(heads)
        if NULL_REVISION in missing_revids:
            missing_revids.remove(NULL_REVISION)
        missing_revids = self.repository.has_revisions(missing_revids)
        if not missing_revids:
            if stop_revision is None:
                self._map_updated = True
            return
        self.start_write_group()
        try:
            with ui.ui_factory.nested_progress_bar() as pb:
                for i, revid in enumerate(graph.iter_topo_order(
                        missing_revids)):
                    trace.mutter('processing %r', revid)
                    pb.update("updating git map", i, len(missing_revids))
                    self._update_sha_map_revision(revid)
            if stop_revision is None:
                self._map_updated = True
        except BaseException:
            self.abort_write_group()
            raise
        else:
            self.commit_write_group()

    def __iter__(self):
        self._update_sha_map()
        return iter(self._cache.idmap.sha1s())

    def _reconstruct_commit(self, rev, tree_sha, lossy, verifiers):
        """Reconstruct a Commit object.

        :param rev: Revision object
        :param tree_sha: SHA1 of the root tree object
        :param lossy: Whether or not to roundtrip bzr metadata
        :param verifiers: Verifiers for the commit
        :return: Commit object
        """
        def parent_lookup(revid):
            try:
                return self._lookup_revision_sha1(revid)
            except errors.NoSuchRevision:
                return None
        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
                                          lossy, verifiers)

    def _revision_to_objects(self, rev, tree, lossy, add_cache_entry=None):
        """Convert a revision to a set of git objects.

        :param rev: Bazaar revision object
        :param tree: Bazaar revision tree
        :param lossy: Whether to not roundtrip all Bazaar revision data
        """
        unusual_modes = extract_unusual_modes(rev)
        present_parents = self.repository.has_revisions(rev.parent_ids)
        parent_trees = self.tree_cache.revision_trees(
            [p for p in rev.parent_ids if p in present_parents])
        root_tree = None
        for path, obj, bzr_key_data in _tree_to_objects(
                tree, parent_trees, self._cache.idmap, unusual_modes,
                self.mapping.BZR_DUMMY_FILE, add_cache_entry):
            if path == "":
                root_tree = obj
                root_key_data = bzr_key_data
                # Don't yield just yet
            else:
                yield path, obj
        if root_tree is None:
            # Pointless commit - get the tree sha elsewhere
            if not rev.parent_ids:
                root_tree = Tree()
            else:
                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                root_tree = self[self[base_sha1].tree]
            root_key_data = (tree.path2id(''), tree.get_revision_id())
        if add_cache_entry is not None:
            add_cache_entry(root_tree, root_key_data, "")
        yield "", root_tree
        if not lossy:
            testament3 = StrictTestament3(rev, tree)
            verifiers = {"testament3-sha1": testament3.as_sha1()}
        else:
            verifiers = {}
        commit_obj = self._reconstruct_commit(rev, root_tree.id,
                                              lossy=lossy, verifiers=verifiers)
        try:
            foreign_revid, mapping = mapping_registry.parse_revision_id(
                rev.revision_id)
        except errors.InvalidRevisionId:
            pass
        else:
            _check_expected_sha(foreign_revid, commit_obj)
        if add_cache_entry is not None:
            add_cache_entry(commit_obj, verifiers, None)

        yield None, commit_obj

    def _get_updater(self, rev):
        return self._cache.get_updater(rev)

    def _update_sha_map_revision(self, revid):
        rev = self.repository.get_revision(revid)
        tree = self.tree_cache.revision_tree(rev.revision_id)
        updater = self._get_updater(rev)
        # FIXME JRV 2011-12-15: Shouldn't we try both values for lossy ?
        for path, obj in self._revision_to_objects(
                rev, tree, lossy=(not self.mapping.roundtripping),
                add_cache_entry=updater.add_object):
            if isinstance(obj, Commit):
                commit_obj = obj
        commit_obj = updater.finish()
        return commit_obj.id

    def _reconstruct_blobs(self, keys):
        """Return Git Blob objects from fileids and revisions stored in bzr.

        :param keys: Iterable over (fileid, revision, expected_sha) tuples
        """
        stream = self.repository.iter_files_bytes(
            ((key[0], key[1], key) for key in keys))
        for (file_id, revision, expected_sha), chunks in stream:
            blob = Blob()
            blob.chunked = list(chunks)
            if blob.id != expected_sha and blob.data == b"":
                # Perhaps it's a symlink ?
                tree = self.tree_cache.revision_tree(revision)
                path = tree.id2path(file_id)
                if tree.kind(path) == 'symlink':
                    blob = symlink_to_blob(tree.get_symlink_target(path))
            _check_expected_sha(expected_sha, blob)
            yield blob

    def _reconstruct_tree(self, fileid, revid, bzr_tree, unusual_modes,
                          expected_sha=None):
        """Return a Git Tree object from a file id and a revision stored in bzr.

        :param fileid: file id of the directory in the tree.
        :param revid: Revision of the tree.
        """
        def get_ie_sha1(path, entry):
            if entry.kind == "directory":
                try:
                    return self._cache.idmap.lookup_tree_id(entry.file_id,
                                                            revid)
                except (NotImplementedError, KeyError):
                    obj = self._reconstruct_tree(
                        entry.file_id, revid, bzr_tree, unusual_modes)
                    if obj is None:
                        return None
                    return obj.id
            elif entry.kind in ("file", "symlink"):
                try:
                    return self._cache.idmap.lookup_blob_id(entry.file_id,
                                                            entry.revision)
                except KeyError:
                    # no-change merge?
                    return next(self._reconstruct_blobs(
                        [(entry.file_id, entry.revision, None)])).id
            elif entry.kind == 'tree-reference':
                # FIXME: Make sure the file id is the root id
                return self._lookup_revision_sha1(entry.reference_revision)
            else:
                raise AssertionError("unknown entry kind '%s'" % entry.kind)
        path = bzr_tree.id2path(fileid)
        tree = directory_to_tree(
            path,
            bzr_tree.iter_child_entries(path),
            get_ie_sha1, unusual_modes, self.mapping.BZR_DUMMY_FILE,
            bzr_tree.path2id('') == fileid)
        if tree is not None:
            _check_expected_sha(expected_sha, tree)
        return tree

    def get_parents(self, sha):
        """Retrieve the parents of a Git commit by SHA1.

        :param sha: SHA1 of the commit
        :raises: KeyError, NotCommitError
        """
        return self[sha].parents

    def _lookup_revision_sha1(self, revid):
        """Return the SHA1 matching a Bazaar revision."""
        if revid == NULL_REVISION:
            return ZERO_SHA
        try:
            return self._cache.idmap.lookup_commit(revid)
        except KeyError:
            try:
                return mapping_registry.parse_revision_id(revid)[0]
            except errors.InvalidRevisionId:
                self._update_sha_map(revid)
                return self._cache.idmap.lookup_commit(revid)

    def get_raw(self, sha):
        """Get the raw representation of a Git object by SHA1.

        :param sha: SHA1 of the git object
        """
        if len(sha) == 20:
            sha = sha_to_hex(sha)
        obj = self[sha]
        return (obj.type, obj.as_raw_string())

    def __contains__(self, sha):
        # See if sha is in map
        try:
            for (type, type_data) in self.lookup_git_sha(sha):
                if type == "commit":
                    if self.repository.has_revision(type_data[0]):
                        return True
                elif type == "blob":
                    if type_data in self.repository.texts:
                        return True
                elif type == "tree":
                    if self.repository.has_revision(type_data[1]):
                        return True
                else:
                    raise AssertionError("Unknown object type '%s'" % type)
        except KeyError:
            pass
        return False

    def lock_read(self):
        self._locked = 'r'
        self._map_updated = False
        self.repository.lock_read()
        return LogicalLockResult(self.unlock)

    def lock_write(self):
        self._locked = 'w'
        self._map_updated = False
        self.repository.lock_write()
        return LogicalLockResult(self.unlock)

    def is_locked(self):
        return (self._locked is not None)

    def unlock(self):
        self._locked = None
        self._map_updated = False
        self.repository.unlock()

    def lookup_git_shas(self, shas):
        ret = {}
        for sha in shas:
            if sha == ZERO_SHA:
                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
                continue
            try:
                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
            except KeyError:
                # if not, see if there are any unconverted revisions and
                # add them to the map, search for sha in map again
                self._update_sha_map()
                try:
                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
                except KeyError:
                    pass
        return ret

    def lookup_git_sha(self, sha):
        return self.lookup_git_shas([sha])[sha]
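
    # Shape of the entries returned above (values are illustrative; the
    # tuples mirror how __getitem__ below unpacks them):
    #
    #   ("commit", (revid, tree_sha, verifiers))
    #   ("blob", (fileid, revision))
    #   ("tree", (fileid, revid))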

    def __getitem__(self, sha):
        for (kind, type_data) in self.lookup_git_sha(sha):
            # convert object to git object
            if kind == "commit":
                (revid, tree_sha, verifiers) = type_data
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    if revid == NULL_REVISION:
                        raise AssertionError(
                            "should not try to look up NULL_REVISION")
                    trace.mutter('entry for %s %s in shamap: %r, but not '
                                 'found in repository', kind, sha, type_data)
                    raise KeyError(sha)
                # FIXME: the type data should say whether conversion was
                # lossless
                commit = self._reconstruct_commit(
                    rev, tree_sha, lossy=(not self.mapping.roundtripping),
                    verifiers=verifiers)
                _check_expected_sha(sha, commit)
                return commit
            elif kind == "blob":
                (fileid, revision) = type_data
                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
                return next(blobs)
            elif kind == "tree":
                (fileid, revid) = type_data
                try:
                    tree = self.tree_cache.revision_tree(revid)
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    trace.mutter(
                        'entry for %s %s in shamap: %r, but not found in '
                        'repository', kind, sha, type_data)
                    raise KeyError(sha)
                unusual_modes = extract_unusual_modes(rev)
                try:
                    return self._reconstruct_tree(
                        fileid, revid, tree, unusual_modes, expected_sha=sha)
                except errors.NoSuchRevision:
                    raise KeyError(sha)
            else:
                raise AssertionError("Unknown object type '%s'" % kind)
        raise KeyError(sha)

    def generate_lossy_pack_data(self, have, want, shallow=None,
                                 progress=None,
                                 get_tagged=None, ofs_delta=False):
        return pack_objects_to_data(
            self.generate_pack_contents(have, want, progress=progress,
                                        shallow=shallow, get_tagged=get_tagged,
                                        lossy=True))

    def generate_pack_contents(self, have, want, shallow=None, progress=None,
                               ofs_delta=False, get_tagged=None, lossy=False):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        """
        processed = set()
        ret = self.lookup_git_shas(have + want)
        for commit_sha in have:
            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    processed.add(type_data[0])
            except KeyError:
                trace.mutter("unable to find remote ref %s", commit_sha)
        pending = set()
        for commit_sha in want:
            if commit_sha in have:
                continue
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    pending.add(type_data[0])
            except KeyError:
                pass
        shallows = set()
        for commit_sha in shallow or set():
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    shallows.add(type_data[0])
            except KeyError:
                pass

        graph = self.repository.get_graph()
        todo = _find_missing_bzr_revids(graph, pending, processed, shallows)
        ret = PackTupleIterable(self)
        with ui.ui_factory.nested_progress_bar() as pb:
            for i, revid in enumerate(graph.iter_topo_order(todo)):
                pb.update("generating git objects", i, len(todo))
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    continue
                tree = self.tree_cache.revision_tree(revid)
                for path, obj in self._revision_to_objects(
                        rev, tree, lossy=lossy):
                    ret.add(obj.id, path)
        return ret

    def add_thin_pack(self):
        import tempfile
        import os
        fd, path = tempfile.mkstemp(suffix=".pack")
        f = os.fdopen(fd, 'wb')

        def commit():
            from .fetch import import_git_objects
            os.fsync(fd)
            f.close()
            if os.path.getsize(path) == 0:
                return
            pd = PackData(path)
            pd.create_index_v2(path[:-5] + ".idx", self.object_store.get_raw)
            p = Pack(path[:-5])
            with self.repository.lock_write():
                self.repository.start_write_group()
                try:
                    import_git_objects(self.repository, self.mapping,
                                       p.iterobjects(get_raw=self.get_raw),
                                       self.object_store)
                except BaseException:
                    self.repository.abort_write_group()
                    raise
                else:
                    self.repository.commit_write_group()
        return f, commit

    # The pack isn't kept around anyway, so no point
    # in treating full packs different from thin packs
    add_pack = add_thin_pack
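
# End-to-end sketch (illustrative; assumes `repo` is an open breezy repository
# and `revid` one of its revision ids):
#
#   store = get_object_store(repo)
#   with store.lock_read():
#       sha = store._lookup_revision_sha1(revid)  # bzr revid -> git commit sha
#       commit = store[sha]                       # reconstructed dulwich Commit
#       num_type, raw = store.get_raw(sha)        # raw object representation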