# Copyright (C) 2009-2018 Jelmer Vernooij <jelmer@jelmer.uk>
# Copyright (C) 2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Map from Git SHAs to Bazaar objects."""

from __future__ import absolute_import
from dulwich.objects import (
    Blob,
    Commit,
    Tree,
    sha_to_hex,
    ZERO_SHA,
    )
from dulwich.object_store import (
    BaseObjectStore,
    )
from dulwich.pack import (
    Pack,
    PackData,
    pack_objects_to_data,
    )

from .. import (
    errors,
    lru_cache,
    osutils,
    trace,
    ui,
    )
from ..lock import LogicalLockResult
from ..revision import (
    NULL_REVISION,
    )
from ..sixish import viewitems
from ..tree import InterTree
from ..bzr.testament import (
    StrictTestament3,
    )

from .cache import (
    from_repository as cache_from_repository,
    )
from .mapping import (
    default_mapping,
    encode_git_path,
    entry_mode,
    extract_unusual_modes,
    mapping_registry,
    symlink_to_blob,
    )
from .unpeel_map import (
    UnpeelMap,
    )

import posixpath
import stat


BANNED_FILENAMES = ['.git']


def get_object_store(repo, mapping=None):
    git = getattr(repo, "_git", None)
    if git is not None:
        git.object_store.unlock = lambda: None
        git.object_store.lock_read = lambda: LogicalLockResult(lambda: None)
        git.object_store.lock_write = lambda: LogicalLockResult(lambda: None)
        return git.object_store
    return BazaarObjectStore(repo, mapping)
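
# Example (editorial sketch, not part of the original module): obtaining an
# object store for a local branch; the path "." is a placeholder.
#
#   from breezy.branch import Branch
#   branch = Branch.open(".")
#   store = get_object_store(branch.repository)
#   store.lock_read()
#   try:
#       for sha in store:
#           print(sha)
#   finally:
#       store.unlock()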


MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024


class LRUTreeCache(object):

    def __init__(self, repository):
        def approx_tree_size(tree):
            # Very rough estimate, 250 per inventory entry
            try:
                inv = tree.root_inventory
            except AttributeError:
                inv = tree.inventory
            return len(inv) * 250
        self.repository = repository
        self._cache = lru_cache.LRUSizeCache(
            max_size=MAX_TREE_CACHE_SIZE, after_cleanup_size=None,
            compute_size=approx_tree_size)

    def revision_tree(self, revid):
        try:
            tree = self._cache[revid]
        except KeyError:
            tree = self.repository.revision_tree(revid)
            self.add(tree)
        return tree

    def iter_revision_trees(self, revids):
        trees = {}
        todo = []
        for revid in revids:
            try:
                tree = self._cache[revid]
            except KeyError:
                todo.append(revid)
            else:
                if tree.get_revision_id() != revid:
                    raise AssertionError(
                        "revision id did not match: %s != %s" % (
                            tree.get_revision_id(), revid))
                trees[revid] = tree
        for tree in self.repository.revision_trees(todo):
            trees[tree.get_revision_id()] = tree
            self.add(tree)
        return (trees[r] for r in revids)

    def revision_trees(self, revids):
        return list(self.iter_revision_trees(revids))

    def add(self, tree):
        self._cache[tree.get_revision_id()] = tree
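
# Example (editorial sketch, not part of the original module): trees are
# cached by revision id, so converting a run of related revisions avoids
# re-reading inventories from the repository; branch and the revids are
# placeholders.
#
#   cache = LRUTreeCache(branch.repository)
#   tree = cache.revision_tree(branch.last_revision())
#   trees = cache.revision_trees([revid_a, revid_b])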


def _find_missing_bzr_revids(graph, want, have, shallow=None):
    """Find the revisions that have to be pushed.

    :param graph: Repository graph providing the parent map
    :param want: Revisions the target wants
    :param have: Revisions the target already has
    :param shallow: Revisions the target is shallow at
    :return: Set of revisions to fetch
    """
    handled = set(have)
    if shallow:
        # Shallows themselves still need to be fetched, but let's exclude
        # their ancestors.
        for ps in graph.get_parent_map(shallow).values():
            handled.update(ps)
    handled.add(NULL_REVISION)
    todo = set()
    for rev in want:
        extra_todo = graph.find_unique_ancestors(rev, handled)
        todo.update(extra_todo)
        handled.update(extra_todo)
    return todo
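
# Example (editorial sketch, not part of the original module): computing the
# revisions to push when the target already has revid_a and asks for revid_b;
# repository and the revids are placeholders.
#
#   graph = repository.get_graph()
#   todo = _find_missing_bzr_revids(graph, want={revid_b}, have={revid_a})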


def _check_expected_sha(expected_sha, object):
    """Check whether an object matches an expected SHA.

    :param expected_sha: None or expected SHA as either binary or hex digest
    :param object: Object to verify
    """
    if expected_sha is None:
        return
    if len(expected_sha) == 40:
        if expected_sha != object.sha().hexdigest().encode('ascii'):
            raise AssertionError("Invalid sha for %r: %s" % (object,
                                                             expected_sha))
    elif len(expected_sha) == 20:
        if expected_sha != object.sha().digest():
            raise AssertionError("Invalid sha for %r: %s" % (
                object, sha_to_hex(expected_sha)))
    else:
        raise AssertionError("Unknown length %d for %r" % (len(expected_sha),
                                                           expected_sha))


def directory_to_tree(path, children, lookup_ie_sha1, unusual_modes,
                      empty_file_name, allow_empty=False):
    """Create a Git Tree object from a Bazaar directory.

    :param path: directory path
    :param children: Children inventory entries
    :param lookup_ie_sha1: Lookup the Git SHA1 for an inventory entry
    :param unusual_modes: Dictionary with unusual file modes by file ids
    :param empty_file_name: Name to use for dummy files in empty directories,
        None to ignore empty directories.
    :param allow_empty: Whether to allow an empty tree
    :return: Tree object, or None if the directory is empty and empty
        directories are ignored
    """
    tree = Tree()
    for value in children:
        if value.name in BANNED_FILENAMES:
            continue
        child_path = osutils.pathjoin(path, value.name)
        try:
            mode = unusual_modes[child_path]
        except KeyError:
            mode = entry_mode(value)
        hexsha = lookup_ie_sha1(child_path, value)
        if hexsha is not None:
            tree.add(encode_git_path(value.name), mode, hexsha)
    if not allow_empty and len(tree) == 0:
        # Only the root can be an empty tree
        if empty_file_name is not None:
            tree.add(empty_file_name, stat.S_IFREG | 0o644, Blob().id)
        else:
            return None
    return tree
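
# Example (editorial sketch, not part of the original module): Git cannot
# represent empty directories, so a dummy file entry is added unless the
# caller passes empty_file_name=None; lookup_ie_sha1 and b'.empty' are
# placeholders.
#
#   git_tree = directory_to_tree('docs', [], lookup_ie_sha1, {}, b'.empty')
#   assert len(git_tree) == 1  # contains only the dummy entry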


def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
                     dummy_file_name=None, add_cache_entry=None):
    """Iterate over the objects that were introduced in a revision.

    :param idmap: id map
    :param parent_trees: Parent revision trees
    :param unusual_modes: Unusual file modes dictionary
    :param dummy_file_name: File name to use for dummy files
        in empty directories. None to skip empty directories
    :return: Yields (path, object, ie) entries
    """
    dirty_dirs = set()
    new_blobs = []
    shamap = {}
    try:
        base_tree = parent_trees[0]
        other_parent_trees = parent_trees[1:]
    except IndexError:
        base_tree = tree._repository.revision_tree(NULL_REVISION)
        other_parent_trees = []

    def find_unchanged_parent_ie(path, kind, other, parent_trees):
        for ptree in parent_trees:
            intertree = InterTree.get(ptree, tree)
            ppath = intertree.find_source_path(path)
            if ppath is not None:
                pkind = ptree.kind(ppath)
                if kind == "file":
                    if (pkind == "file" and
                            ptree.get_file_sha1(ppath) == other):
                        return (
                            ptree.path2id(ppath),
                            ptree.get_file_revision(ppath))
                if kind == "symlink":
                    if (pkind == "symlink" and
                            ptree.get_symlink_target(ppath) == other):
                        return (
                            ptree.path2id(ppath),
                            ptree.get_file_revision(ppath))
        raise KeyError

    # Find all the changed blobs
    for change in tree.iter_changes(base_tree):
        if change.name[1] in BANNED_FILENAMES:
            continue
        if change.kind[1] == "file":
            sha1 = tree.get_file_sha1(change.path[1])
            blob_id = None
            try:
                (pfile_id, prevision) = find_unchanged_parent_ie(
                    change.path[1], change.kind[1], sha1, other_parent_trees)
            except KeyError:
                pass
            else:
                # It existed in one of the parents, with the same contents.
                # So no need to yield any new git objects.
                try:
                    blob_id = idmap.lookup_blob_id(
                        pfile_id, prevision)
                except KeyError:
                    if not change.changed_content:
                        # no-change merge ?
                        blob = Blob()
                        blob.data = tree.get_file_text(change.path[1])
                        blob_id = blob.id
            if blob_id is None:
                new_blobs.append((change.path[1], change.file_id))
            else:
                # TODO(jelmer): This code path does not have any test
                # coverage.
                shamap[change.path[1]] = blob_id
                if add_cache_entry is not None:
                    add_cache_entry(
                        ("blob", blob_id),
                        (change.file_id,
                         tree.get_file_revision(change.path[1])),
                        change.path[1])
        elif change.kind[1] == "symlink":
            target = tree.get_symlink_target(change.path[1])
            blob = symlink_to_blob(target)
            shamap[change.path[1]] = blob.id
            if add_cache_entry is not None:
                add_cache_entry(
                    blob,
                    (change.file_id,
                     tree.get_file_revision(change.path[1])),
                    change.path[1])
            try:
                find_unchanged_parent_ie(
                    change.path[1], change.kind[1], target,
                    other_parent_trees)
            except KeyError:
                if change.changed_content:
                    yield (change.path[1], blob,
                           (change.file_id,
                            tree.get_file_revision(change.path[1])))
        elif change.kind[1] is None:
            shamap[change.path[1]] = None
        elif change.kind[1] != 'directory':
            raise AssertionError(change.kind[1])
        for p in change.path:
            if p is None:
                continue
            dirty_dirs.add(osutils.dirname(p))

    # Fetch contents of the blobs that were changed
    for (path, file_id), chunks in tree.iter_files_bytes(
            [(path, (path, file_id)) for (path, file_id) in new_blobs]):
        obj = Blob()
        obj.chunked = chunks
        if add_cache_entry is not None:
            add_cache_entry(obj, (file_id, tree.get_file_revision(path)), path)
        yield path, obj, (file_id, tree.get_file_revision(path))
        shamap[path] = obj.id

    for path in unusual_modes:
        dirty_dirs.add(posixpath.dirname(path))

    for dir in list(dirty_dirs):
        for parent in osutils.parent_directories(dir):
            if parent in dirty_dirs:
                break
            dirty_dirs.add(parent)

    if dirty_dirs:
        dirty_dirs.add('')

    def ie_to_hexsha(path, ie):
        try:
            return shamap[path]
        except KeyError:
            pass
        # FIXME: Should be the same as in parent
        if ie.kind == "file":
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge ?
                blob = Blob()
                blob.data = tree.get_file_text(path)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "symlink":
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge ?
                target = tree.get_symlink_target(path)
                blob = symlink_to_blob(target)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "directory":
            # Not all cache backends store the tree information,
            # calculate again from scratch
            ret = directory_to_tree(
                path, ie.children.values(), ie_to_hexsha, unusual_modes,
                dummy_file_name, ie.parent_id is None)
            if ret is None:
                return ret
            return ret.id
        else:
            raise AssertionError("unknown entry kind '%s'" % ie.kind)

    for path in sorted(dirty_dirs, reverse=True):
        if not tree.has_filename(path):
            continue
        if tree.kind(path) != 'directory':
            continue
        obj = directory_to_tree(
            path, tree.iter_child_entries(path), ie_to_hexsha, unusual_modes,
            dummy_file_name, path == '')
        if obj is not None:
            file_id = tree.path2id(path)
            if add_cache_entry is not None:
                add_cache_entry(obj, (file_id, tree.get_revision_id()), path)
            yield path, obj, (file_id, tree.get_revision_id())
            shamap[path] = obj.id
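
# Example (editorial sketch, not part of the original module): consuming the
# generator; only changed blobs and the trees of dirtied directories are
# yielded, deepest directories first and the root tree ("") last.
#
#   for path, obj, (file_id, revision) in _tree_to_objects(
#           tree, parent_trees, idmap, unusual_modes={}):
#       print(path, obj.id)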


class PackTupleIterable(object):
    """Wrapper around an object store that translates items to pack tuples."""

    def __init__(self, store):
        self.store = store
        self.store.lock_read()
        self.objects = {}

    def __del__(self):
        self.store.unlock()

    def add(self, sha, path):
        self.objects[sha] = path

    def __len__(self):
        return len(self.objects)

    def __iter__(self):
        return ((self.store[object_id], path) for (object_id, path) in
                viewitems(self.objects))
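
# Example (editorial sketch, not part of the original module): the iterable
# only records SHAs and defers object reconstruction until the pack writer
# walks it; store and sha are placeholders.
#
#   pack = PackTupleIterable(store)
#   pack.add(sha, 'README')   # sha of an object known to the store
#   entries = list(pack)      # [(<Blob ...>, 'README')]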


class BazaarObjectStore(BaseObjectStore):
    """A Git-style object store backed onto a Bazaar repository."""

    def __init__(self, repository, mapping=None):
        self.repository = repository
        self._map_updated = False
        self._locked = None
        if mapping is None:
            self.mapping = default_mapping
        else:
            self.mapping = mapping
        self._cache = cache_from_repository(repository)
        self._content_cache_types = ("tree",)
        self.start_write_group = self._cache.idmap.start_write_group
        self.abort_write_group = self._cache.idmap.abort_write_group
        self.commit_write_group = self._cache.idmap.commit_write_group
        self.tree_cache = LRUTreeCache(self.repository)
        self.unpeel_map = UnpeelMap.from_repository(self.repository)

    def _missing_revisions(self, revisions):
        return self._cache.idmap.missing_revisions(revisions)

    def _update_sha_map(self, stop_revision=None):
        if not self.is_locked():
            raise errors.LockNotHeld(self)
        if self._map_updated:
            return
        if (stop_revision is not None and
                not self._missing_revisions([stop_revision])):
            return
        graph = self.repository.get_graph()
        if stop_revision is None:
            all_revids = self.repository.all_revision_ids()
            missing_revids = self._missing_revisions(all_revids)
        else:
            heads = set([stop_revision])
            missing_revids = self._missing_revisions(heads)
            while heads:
                parents = graph.get_parent_map(heads)
                todo = set()
                for p in parents.values():
                    todo.update([x for x in p if x not in missing_revids])
                heads = self._missing_revisions(todo)
                missing_revids.update(heads)
        if NULL_REVISION in missing_revids:
            missing_revids.remove(NULL_REVISION)
        missing_revids = self.repository.has_revisions(missing_revids)
        if not missing_revids:
            if stop_revision is None:
                self._map_updated = True
            return
        self.start_write_group()
        try:
            with ui.ui_factory.nested_progress_bar() as pb:
                for i, revid in enumerate(graph.iter_topo_order(
                        missing_revids)):
                    trace.mutter('processing %r', revid)
                    pb.update("updating git map", i, len(missing_revids))
                    self._update_sha_map_revision(revid)
            if stop_revision is None:
                self._map_updated = True
        except BaseException:
            self.abort_write_group()
            raise
        else:
            self.commit_write_group()

    def __iter__(self):
        self._update_sha_map()
        return iter(self._cache.idmap.sha1s())

    def _reconstruct_commit(self, rev, tree_sha, lossy, verifiers):
        """Reconstruct a Commit object.

        :param rev: Revision object
        :param tree_sha: SHA1 of the root tree object
        :param lossy: Whether or not to roundtrip bzr metadata
        :param verifiers: Verifiers for the commits
        :return: Commit object
        """
        def parent_lookup(revid):
            try:
                return self._lookup_revision_sha1(revid)
            except errors.NoSuchRevision:
                return None
        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
                                          lossy, verifiers)

    def _revision_to_objects(self, rev, tree, lossy, add_cache_entry=None):
        """Convert a revision to a set of git objects.

        :param rev: Bazaar revision object
        :param tree: Bazaar revision tree
        :param lossy: Whether to not roundtrip all Bazaar revision data
        """
        unusual_modes = extract_unusual_modes(rev)
        present_parents = self.repository.has_revisions(rev.parent_ids)
        parent_trees = self.tree_cache.revision_trees(
            [p for p in rev.parent_ids if p in present_parents])
        root_tree = None
        for path, obj, bzr_key_data in _tree_to_objects(
                tree, parent_trees, self._cache.idmap, unusual_modes,
                self.mapping.BZR_DUMMY_FILE, add_cache_entry):
            if path == "":
                root_tree = obj
                root_key_data = bzr_key_data
                # Don't yield just yet
            else:
                yield path, obj
        if root_tree is None:
            # Pointless commit - get the tree sha elsewhere
            if not rev.parent_ids:
                root_tree = Tree()
            else:
                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                root_tree = self[self[base_sha1].tree]
            root_key_data = (tree.path2id(''), tree.get_revision_id())
        if add_cache_entry is not None:
            add_cache_entry(root_tree, root_key_data, "")
        yield "", root_tree
        if not lossy:
            testament3 = StrictTestament3(rev, tree)
            verifiers = {"testament3-sha1": testament3.as_sha1()}
        else:
            verifiers = {}
        commit_obj = self._reconstruct_commit(rev, root_tree.id,
                                              lossy=lossy, verifiers=verifiers)
        try:
            foreign_revid, mapping = mapping_registry.parse_revision_id(
                rev.revision_id)
        except errors.InvalidRevisionId:
            pass
        else:
            _check_expected_sha(foreign_revid, commit_obj)
        if add_cache_entry is not None:
            add_cache_entry(commit_obj, verifiers, None)

        yield None, commit_obj

    def _get_updater(self, rev):
        return self._cache.get_updater(rev)

    def _update_sha_map_revision(self, revid):
        rev = self.repository.get_revision(revid)
        tree = self.tree_cache.revision_tree(rev.revision_id)
        updater = self._get_updater(rev)
        # FIXME JRV 2011-12-15: Shouldn't we try both values for lossy ?
        for path, obj in self._revision_to_objects(
                rev, tree, lossy=(not self.mapping.roundtripping),
                add_cache_entry=updater.add_object):
            if isinstance(obj, Commit):
                commit_obj = obj
        commit_obj = updater.finish()
        return commit_obj.id

    def _reconstruct_blobs(self, keys):
        """Return Git Blob objects from fileids and revisions stored in bzr.

        :param keys: Iterable over (fileid, revision, expected SHA) tuples
        """
        stream = self.repository.iter_files_bytes(
            ((key[0], key[1], key) for key in keys))
        for (file_id, revision, expected_sha), chunks in stream:
            blob = Blob()
            blob.chunked = chunks
            if blob.id != expected_sha and blob.data == b"":
                # Perhaps it's a symlink ?
                tree = self.tree_cache.revision_tree(revision)
                path = tree.id2path(file_id)
                if tree.kind(path) == 'symlink':
                    blob = symlink_to_blob(tree.get_symlink_target(path))
            _check_expected_sha(expected_sha, blob)
            yield blob

    def _reconstruct_tree(self, fileid, revid, bzr_tree, unusual_modes,
                          expected_sha=None):
        """Return a Git Tree object from a file id and revision stored in bzr.

        :param fileid: fileid in the tree.
        :param revid: Revision of the tree.
        """
        def get_ie_sha1(path, entry):
            if entry.kind == "directory":
                try:
                    return self._cache.idmap.lookup_tree_id(entry.file_id,
                                                            revid)
                except (NotImplementedError, KeyError):
                    obj = self._reconstruct_tree(
                        entry.file_id, revid, bzr_tree, unusual_modes)
                    if obj is None:
                        return None
                    return obj.id
            elif entry.kind in ("file", "symlink"):
                try:
                    return self._cache.idmap.lookup_blob_id(entry.file_id,
                                                            entry.revision)
                except KeyError:
                    # no-change merge ?
                    return next(self._reconstruct_blobs(
                        [(entry.file_id, entry.revision, None)])).id
            elif entry.kind == 'tree-reference':
                # FIXME: Make sure the file id is the root id
                return self._lookup_revision_sha1(entry.reference_revision)
            else:
                raise AssertionError("unknown entry kind '%s'" % entry.kind)
        path = bzr_tree.id2path(fileid)
        tree = directory_to_tree(
            path,
            bzr_tree.iter_child_entries(path),
            get_ie_sha1, unusual_modes, self.mapping.BZR_DUMMY_FILE,
            bzr_tree.path2id('') == fileid)
        if tree is not None:
            _check_expected_sha(expected_sha, tree)
        return tree

    def get_parents(self, sha):
        """Retrieve the parents of a Git commit by SHA1.

        :param sha: SHA1 of the commit
        :raises: KeyError, NotCommitError
        """
        return self[sha].parents

    def _lookup_revision_sha1(self, revid):
        """Return the SHA1 matching a Bazaar revision."""
        if revid == NULL_REVISION:
            return ZERO_SHA
        try:
            return self._cache.idmap.lookup_commit(revid)
        except KeyError:
            try:
                return mapping_registry.parse_revision_id(revid)[0]
            except errors.InvalidRevisionId:
                self._update_sha_map(revid)
                return self._cache.idmap.lookup_commit(revid)
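
    # Example (editorial sketch, not part of the original module): NULL_REVISION
    # maps to the all-zero SHA, other revisions go through the id map (which is
    # updated on a cache miss); store and branch are placeholders.
    #
    #   store._lookup_revision_sha1(NULL_REVISION)           # -> ZERO_SHA
    #   store._lookup_revision_sha1(branch.last_revision())  # hex SHA1 bytes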

    def get_raw(self, sha):
        """Get the raw representation of a Git object by SHA1.

        :param sha: SHA1 of the git object
        """
        if len(sha) == 20:
            sha = sha_to_hex(sha)
        obj = self[sha]
        return (obj.type, obj.as_raw_string())

    def __contains__(self, sha):
        # See if sha is in map
        try:
            for (type, type_data) in self.lookup_git_sha(sha):
                if type == "commit":
                    if self.repository.has_revision(type_data[0]):
                        return True
                elif type == "blob":
                    if type_data in self.repository.texts:
                        return True
                elif type == "tree":
                    if self.repository.has_revision(type_data[1]):
                        return True
                else:
                    raise AssertionError("Unknown object type '%s'" % type)
        except KeyError:
            pass
        return False

    def lock_read(self):
        self._locked = 'r'
        self._map_updated = False
        self.repository.lock_read()
        return LogicalLockResult(self.unlock)

    def lock_write(self):
        self._locked = 'w'
        self._map_updated = False
        self.repository.lock_write()
        return LogicalLockResult(self.unlock)

    def is_locked(self):
        return (self._locked is not None)

    def unlock(self):
        self._locked = None
        self._map_updated = False
        self.repository.unlock()

    def lookup_git_shas(self, shas):
        ret = {}
        for sha in shas:
            if sha == ZERO_SHA:
                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
                continue
            try:
                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
            except KeyError:
                # if not, see if there are any unconverted revisions and
                # add them to the map, search for sha in map again
                self._update_sha_map()
                try:
                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
                except KeyError:
                    pass
        return ret

    def lookup_git_sha(self, sha):
        return self.lookup_git_shas([sha])[sha]

    def __getitem__(self, sha):
        for (kind, type_data) in self.lookup_git_sha(sha):
            # convert object to git object
            if kind == "commit":
                (revid, tree_sha, verifiers) = type_data
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    if revid == NULL_REVISION:
                        raise AssertionError(
                            "should not try to look up NULL_REVISION")
                    trace.mutter('entry for %s %s in shamap: %r, but not '
                                 'found in repository', kind, sha, type_data)
                    raise KeyError(sha)
                # FIXME: the type data should say whether conversion was
                # lossless
                commit = self._reconstruct_commit(
                    rev, tree_sha, lossy=(not self.mapping.roundtripping),
                    verifiers=verifiers)
                _check_expected_sha(sha, commit)
                return commit
            elif kind == "blob":
                (fileid, revision) = type_data
                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
                return next(blobs)
            elif kind == "tree":
                (fileid, revid) = type_data
                try:
                    tree = self.tree_cache.revision_tree(revid)
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    trace.mutter(
                        'entry for %s %s in shamap: %r, but not found in '
                        'repository', kind, sha, type_data)
                    raise KeyError(sha)
                unusual_modes = extract_unusual_modes(rev)
                try:
                    return self._reconstruct_tree(
                        fileid, revid, tree, unusual_modes, expected_sha=sha)
                except errors.NoSuchRevision:
                    raise KeyError(sha)
            else:
                raise AssertionError("Unknown object type '%s'" % kind)
        raise KeyError(sha)

    def generate_lossy_pack_data(self, have, want, shallow=None,
                                 progress=None,
                                 get_tagged=None, ofs_delta=False):
        return pack_objects_to_data(
            self.generate_pack_contents(have, want, progress=progress,
                                        shallow=shallow, get_tagged=get_tagged,
                                        lossy=True))

    def generate_pack_contents(self, have, want, shallow=None, progress=None,
                               ofs_delta=False, get_tagged=None, lossy=False):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        """
        processed = set()
        ret = self.lookup_git_shas(have + want)
        for commit_sha in have:
            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    processed.add(type_data[0])
            except KeyError:
                trace.mutter("unable to find remote ref %s", commit_sha)
        pending = set()
        for commit_sha in want:
            if commit_sha in have:
                continue
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    pending.add(type_data[0])
            except KeyError:
                pass
        shallows = set()
        for commit_sha in shallow or set():
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    shallows.add(type_data[0])
            except KeyError:
                pass

        graph = self.repository.get_graph()
        todo = _find_missing_bzr_revids(graph, pending, processed, shallows)
        ret = PackTupleIterable(self)
        with ui.ui_factory.nested_progress_bar() as pb:
            for i, revid in enumerate(graph.iter_topo_order(todo)):
                pb.update("generating git objects", i, len(todo))
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    continue
                tree = self.tree_cache.revision_tree(revid)
                for path, obj in self._revision_to_objects(
                        rev, tree, lossy=lossy):
                    ret.add(obj.id, path)
        return ret
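
    # Example (editorial sketch, not part of the original module): a push to
    # an empty remote sends everything reachable from the wanted commits;
    # store and wants (git commit SHAs of the local heads) are placeholders.
    #
    #   objects = store.generate_pack_contents([], wants)
    #   data = store.generate_lossy_pack_data([], wants)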

    def add_thin_pack(self):
        import tempfile
        import os
        fd, path = tempfile.mkstemp(suffix=".pack")
        f = os.fdopen(fd, 'wb')

        def commit():
            from .fetch import import_git_objects
            os.fsync(fd)
            f.close()
            if os.path.getsize(path) == 0:
                return
            pd = PackData(path)
            pd.create_index_v2(path[:-5] + ".idx", self.object_store.get_raw)

            p = Pack(path[:-5])
            with self.repository.lock_write():
                self.repository.start_write_group()
                try:
                    import_git_objects(self.repository, self.mapping,
                                       p.iterobjects(get_raw=self.get_raw),
                                       self.object_store)
                except BaseException:
                    self.repository.abort_write_group()
                    raise
                else:
                    self.repository.commit_write_group()
        return f, commit

    # The pack isn't kept around anyway, so there is no point in treating
    # full packs differently from thin packs
    add_pack = add_thin_pack