# Copyright (C) 2009-2018 Jelmer Vernooij <jelmer@jelmer.uk>
# Copyright (C) 2012 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Map from Git SHAs to Bazaar objects."""

from __future__ import absolute_import

from dulwich.objects import (
    Blob,
    Commit,
    Tree,
    sha_to_hex,
    ZERO_SHA,
    )
from dulwich.object_store import (
    BaseObjectStore,
    )
from dulwich.pack import (
    Pack,
    PackData,
    pack_objects_to_data,
    )

import posixpath
import stat

from .. import (
    errors,
    lru_cache,
    osutils,
    trace,
    ui,
    )
from ..lock import LogicalLockResult
from ..revision import (
    NULL_REVISION,
    )
from ..sixish import viewitems
from ..testament import (
    StrictTestament3,
    )

from .cache import (
    from_repository as cache_from_repository,
    )
from .mapping import (
    default_mapping,
    entry_mode,
    extract_unusual_modes,
    mapping_registry,
    symlink_to_blob,
    )
from .unpeel_map import (
    UnpeelMap,
    )


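# Entries named ".git" cannot be represented in a Git tree (Git itself
# refuses them), so they are skipped when exporting Bazaar trees.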
BANNED_FILENAMES = ['.git']


def get_object_store(repo, mapping=None):
    git = getattr(repo, "_git", None)
    if git is not None:
        git.object_store.unlock = lambda: None
        git.object_store.lock_read = lambda: LogicalLockResult(lambda: None)
        git.object_store.lock_write = lambda: LogicalLockResult(lambda: None)
        return git.object_store
    return BazaarObjectStore(repo, mapping)
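# Usage sketch for get_object_store() (the names `branch` and `revid` below
# are illustrative, not defined in this module): for a Git-backed repository
# the native dulwich object store is returned, otherwise a converting
# BazaarObjectStore.
#
#   store = get_object_store(branch.repository)
#   store.lock_read()
#   try:
#       commit = store[store._lookup_revision_sha1(revid)]
#   finally:
#       store.unlock()
#
# Note that _lookup_revision_sha1() only exists on BazaarObjectStore.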


MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024


class LRUTreeCache(object):

    def __init__(self, repository):
        def approx_tree_size(tree):
            # Very rough estimate, 250 bytes per inventory entry
            try:
                inv = tree.root_inventory
            except AttributeError:
                inv = tree.inventory
            return len(inv) * 250
        self.repository = repository
        self._cache = lru_cache.LRUSizeCache(max_size=MAX_TREE_CACHE_SIZE,
            after_cleanup_size=None, compute_size=approx_tree_size)

    def revision_tree(self, revid):
        try:
            tree = self._cache[revid]
        except KeyError:
            tree = self.repository.revision_tree(revid)
            self.add(tree)
        return tree

    def iter_revision_trees(self, revids):
        trees = {}
        todo = []
        for revid in revids:
            try:
                tree = self._cache[revid]
            except KeyError:
                todo.append(revid)
            else:
                if tree.get_revision_id() != revid:
                    raise AssertionError(
                        "revision id did not match: %s != %s" % (
                            tree.get_revision_id(), revid))
                trees[revid] = tree
        for tree in self.repository.revision_trees(todo):
            trees[tree.get_revision_id()] = tree
            self.add(tree)
        return (trees[r] for r in revids)

    def revision_trees(self, revids):
        return list(self.iter_revision_trees(revids))

    def add(self, tree):
        self._cache[tree.get_revision_id()] = tree


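# When pushing to a Git repository, the revisions to convert are exactly the
# ancestors of the revisions the target wants, minus everything reachable
# from the revisions it already has.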
def _find_missing_bzr_revids(graph, want, have):
    """Find the revisions that have to be pushed.

    :param graph: Repository graph to use for finding ancestry
    :param want: Revisions the target wants
    :param have: Revisions the target already has
    :return: Set of revisions to fetch
    """
    handled = set(have)
    todo = set()
    for rev in want:
        extra_todo = graph.find_unique_ancestors(rev, handled)
        todo.update(extra_todo)
        handled.update(extra_todo)
    if NULL_REVISION in todo:
        todo.remove(NULL_REVISION)
    return todo


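# Expected SHAs may be passed around either as 40-byte hex digests or as
# 20-byte binary digests; _check_expected_sha() accepts both forms.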
def _check_expected_sha(expected_sha, object):
    """Check whether an object matches an expected SHA.

    :param expected_sha: None or expected SHA as either binary or as hex digest
    :param object: Object to verify
    """
    if expected_sha is None:
        return
    if len(expected_sha) == 40:
        if expected_sha != object.sha().hexdigest().encode('ascii'):
            raise AssertionError("Invalid sha for %r: %s" % (object,
                expected_sha))
    elif len(expected_sha) == 20:
        if expected_sha != object.sha().digest():
            raise AssertionError("Invalid sha for %r: %s" % (object,
                sha_to_hex(expected_sha)))
    else:
        raise AssertionError("Unknown length %d for %r" % (len(expected_sha),
            expected_sha))


def directory_to_tree(path, children, lookup_ie_sha1, unusual_modes,
                      empty_file_name, allow_empty=False):
    """Create a Git Tree object from a Bazaar directory.

    :param path: directory path
    :param children: Children inventory entries
    :param lookup_ie_sha1: Lookup the Git SHA1 for an inventory entry
    :param unusual_modes: Dictionary with unusual file modes by file path
    :param empty_file_name: Name to use for dummy files in empty directories,
        None to ignore empty directories.
    """
    tree = Tree()
    for value in children:
        if value.name in BANNED_FILENAMES:
            continue
        child_path = osutils.pathjoin(path, value.name)
        try:
            mode = unusual_modes[child_path]
        except KeyError:
            mode = entry_mode(value)
        hexsha = lookup_ie_sha1(child_path, value)
        if hexsha is not None:
            tree.add(value.name.encode("utf-8"), mode, hexsha)
    if not allow_empty and len(tree) == 0:
        # Only the root can be an empty tree
        if empty_file_name is not None:
            tree.add(empty_file_name, stat.S_IFREG | 0o644, Blob().id)
        else:
            return None
    return tree


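# _tree_to_objects() works in two passes: it first yields the blobs that
# changed relative to the parent trees, then rebuilds every "dirty"
# directory bottom-up (deepest paths first) so that a child tree's SHA is
# known before the tree of its parent directory is built.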
def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
                     dummy_file_name=None, add_cache_entry=None):
    """Iterate over the objects that were introduced in a revision.

    :param parent_trees: Parent revision trees
    :param unusual_modes: Unusual file modes dictionary
    :param dummy_file_name: File name to use for dummy files
        in empty directories. None to skip empty directories
    :return: Yields (path, object, ie) entries
    """
    dirty_dirs = set()
    new_blobs = []
    shamap = {}
    try:
        base_tree = parent_trees[0]
        other_parent_trees = parent_trees[1:]
    except IndexError:
        base_tree = tree._repository.revision_tree(NULL_REVISION)
        other_parent_trees = []

    def find_unchanged_parent_ie(file_id, kind, other, parent_trees):
        for ptree in parent_trees:
            try:
                ppath = ptree.id2path(file_id)
            except errors.NoSuchId:
                pass
            else:
                pkind = ptree.kind(ppath, file_id)
                if kind == "file":
                    if (pkind == "file" and
                            ptree.get_file_sha1(ppath, file_id) == other):
                        return (file_id, ptree.get_file_revision(ppath, file_id))
                if kind == "symlink":
                    if (pkind == "symlink" and
                            ptree.get_symlink_target(ppath, file_id) == other):
                        return (file_id, ptree.get_file_revision(ppath, file_id))
        raise KeyError

    # Find all the changed blobs
    for (file_id, path, changed_content, versioned, parent, name, kind,
         executable) in tree.iter_changes(base_tree):
        if name[1] in BANNED_FILENAMES:
            continue
        if kind[1] == "file":
            sha1 = tree.get_file_sha1(path[1], file_id)
            blob_id = None
            try:
                (pfile_id, prevision) = find_unchanged_parent_ie(file_id, kind[1], sha1, other_parent_trees)
            except KeyError:
                pass
            else:
                # It existed in one of the parents, with the same contents.
                # So no need to yield any new git objects.
                try:
                    blob_id = idmap.lookup_blob_id(
                        pfile_id, prevision)
                except KeyError:
                    if not changed_content:
                        # no-change merge ?
                        blob = Blob()
                        blob.data = tree.get_file_text(path[1], file_id)
                        blob_id = blob.id
            if blob_id is None:
                new_blobs.append((path[1], file_id))
            else:
                shamap[path[1]] = blob_id
                if add_cache_entry is not None:
                    add_cache_entry(("blob", blob_id), (file_id, tree.get_file_revision(path[1])), path[1])
        elif kind[1] == "symlink":
            target = tree.get_symlink_target(path[1], file_id)
            blob = symlink_to_blob(target)
            shamap[path[1]] = blob.id
            if add_cache_entry is not None:
                add_cache_entry(blob, (file_id, tree.get_file_revision(path[1])), path[1])
            try:
                find_unchanged_parent_ie(file_id, kind[1], target, other_parent_trees)
            except KeyError:
                if changed_content:
                    yield path[1], blob, (file_id, tree.get_file_revision(path[1], file_id))
        elif kind[1] is None:
            shamap[path[1]] = None
        elif kind[1] != 'directory':
            raise AssertionError(kind[1])
        for p in path:
            if p is None:
                continue
            dirty_dirs.add(osutils.dirname(p))

    # Fetch contents of the blobs that were changed
    for (path, file_id), chunks in tree.iter_files_bytes(
            [(path, (path, file_id)) for (path, file_id) in new_blobs]):
        obj = Blob()
        obj.chunked = chunks
        if add_cache_entry is not None:
            add_cache_entry(obj, (file_id, tree.get_file_revision(path)), path)
        yield path, obj, (file_id, tree.get_file_revision(path, file_id))
        shamap[path] = obj.id

    for path in unusual_modes:
        dirty_dirs.add(posixpath.dirname(path))

    for dir in list(dirty_dirs):
        for parent in osutils.parent_directories(dir):
            if parent in dirty_dirs:
                break
            dirty_dirs.add(parent)

    def ie_to_hexsha(path, ie):
        try:
            return shamap[path]
        except KeyError:
            pass
        # FIXME: Should be the same as in parent
        if ie.kind in ("file", "symlink"):
            try:
                return idmap.lookup_blob_id(ie.file_id, ie.revision)
            except KeyError:
                # no-change merge ?
                blob = Blob()
                blob.data = tree.get_file_text(path, ie.file_id)
                if add_cache_entry is not None:
                    add_cache_entry(blob, (ie.file_id, ie.revision), path)
                return blob.id
        elif ie.kind == "directory":
            # Not all cache backends store the tree information,
            # calculate again from scratch
            ret = directory_to_tree(path, ie.children.values(), ie_to_hexsha,
                unusual_modes, dummy_file_name, ie.parent_id is None)
            if ret is None:
                return ret
            return ret.id
        else:
            raise AssertionError("unknown entry kind '%s'" % ie.kind)

    for path in sorted(dirty_dirs, reverse=True):
        if not tree.has_filename(path):
            continue

        if tree.kind(path) != 'directory':
            continue

        obj = Tree()
        for value in tree.iter_child_entries(path):
            if value.name in BANNED_FILENAMES:
                trace.warning('not exporting %s with banned filename %s',
                              value.kind, value.name)
                continue
            child_path = osutils.pathjoin(path, value.name)
            try:
                mode = unusual_modes[child_path]
            except KeyError:
                mode = entry_mode(value)
            hexsha = ie_to_hexsha(child_path, value)
            if hexsha is not None:
                obj.add(value.name.encode("utf-8"), mode, hexsha)

        if len(obj) > 0:
            file_id = tree.path2id(path)
            if add_cache_entry is not None:
                add_cache_entry(obj, (file_id, tree.get_revision_id()), path)
            yield path, obj, (file_id, tree.get_revision_id())
            shamap[path] = obj.id


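# PackTupleIterable lazily maps object SHAs to (object, path) tuples for
# pack generation, holding a read lock on the backing store for its own
# lifetime.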
class PackTupleIterable(object):

    def __init__(self, store):
        self.store = store
        self.store.lock_read()
        self.objects = {}

    def __del__(self):
        self.store.unlock()

    def add(self, sha, path):
        self.objects[sha] = path

    def __len__(self):
        return len(self.objects)

    def __iter__(self):
        return ((self.store[object_id], path) for (object_id, path) in
                viewitems(self.objects))


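# Usage sketch for BazaarObjectStore (the names `repo` and `revid` below are
# illustrative, not defined in this module):
#
#   store = BazaarObjectStore(repo)
#   store.lock_read()
#   try:
#       commit = store[store._lookup_revision_sha1(revid)]
#       root_tree = store[commit.tree]
#   finally:
#       store.unlock()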
class BazaarObjectStore(BaseObjectStore):
    """A Git-style object store backed onto a Bazaar repository."""

    def __init__(self, repository, mapping=None):
        self.repository = repository
        self._map_updated = False
        self._locked = None
        if mapping is None:
            self.mapping = default_mapping
        else:
            self.mapping = mapping
        self._cache = cache_from_repository(repository)
        self._content_cache_types = ("tree",)
        self.start_write_group = self._cache.idmap.start_write_group
        self.abort_write_group = self._cache.idmap.abort_write_group
        self.commit_write_group = self._cache.idmap.commit_write_group
        self.tree_cache = LRUTreeCache(self.repository)
        self.unpeel_map = UnpeelMap.from_repository(self.repository)

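    # The idmap cache maps Bazaar revision ids to the Git SHAs of the objects
    # generated for them.  It is filled incrementally: _update_sha_map()
    # converts only the revisions the cache does not know about yet, walking
    # back from stop_revision (or over all revisions when none is given).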
    def _missing_revisions(self, revisions):
        return self._cache.idmap.missing_revisions(revisions)

    def _update_sha_map(self, stop_revision=None):
        if not self.is_locked():
            raise errors.LockNotHeld(self)
        if self._map_updated:
            return
        if (stop_revision is not None and
                not self._missing_revisions([stop_revision])):
            return
        graph = self.repository.get_graph()
        if stop_revision is None:
            all_revids = self.repository.all_revision_ids()
            missing_revids = self._missing_revisions(all_revids)
        else:
            heads = set([stop_revision])
            missing_revids = self._missing_revisions(heads)
            while heads:
                parents = graph.get_parent_map(heads)
                todo = set()
                for p in parents.values():
                    todo.update([x for x in p if x not in missing_revids])
                heads = self._missing_revisions(todo)
                missing_revids.update(heads)
        if NULL_REVISION in missing_revids:
            missing_revids.remove(NULL_REVISION)
        missing_revids = self.repository.has_revisions(missing_revids)
        if not missing_revids:
            if stop_revision is None:
                self._map_updated = True
            return
        self.start_write_group()
        try:
            pb = ui.ui_factory.nested_progress_bar()
            try:
                for i, revid in enumerate(graph.iter_topo_order(missing_revids)):
                    trace.mutter('processing %r', revid)
                    pb.update("updating git map", i, len(missing_revids))
                    self._update_sha_map_revision(revid)
            finally:
                pb.finished()
            if stop_revision is None:
                self._map_updated = True
        except BaseException:
            self.abort_write_group()
            raise
        else:
            self.commit_write_group()

    def __iter__(self):
        self._update_sha_map()
        return iter(self._cache.idmap.sha1s())

    def _reconstruct_commit(self, rev, tree_sha, lossy, verifiers):
        """Reconstruct a Commit object.

        :param rev: Revision object
        :param tree_sha: SHA1 of the root tree object
        :param lossy: Whether or not to roundtrip bzr metadata
        :param verifiers: Verifiers for the commits
        :return: Commit object
        """
        def parent_lookup(revid):
            try:
                return self._lookup_revision_sha1(revid)
            except errors.NoSuchRevision:
                return None
        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
            lossy, verifiers)

    def _create_fileid_map_blob(self, tree):
        # FIXME: This can probably be a lot more efficient,
        # not all files necessarily have to be processed.
        file_ids = {}
        for (path, ie) in tree.iter_entries_by_dir():
            if self.mapping.generate_file_id(path) != ie.file_id:
                file_ids[path] = ie.file_id
        return self.mapping.export_fileid_map(file_ids)

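    # Objects are yielded in dependency order: blobs and non-root trees
    # first, then (for non-lossy conversions) the file-id map blob, then the
    # root tree, and finally the commit itself with a path of None.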
    def _revision_to_objects(self, rev, tree, lossy, add_cache_entry=None):
        """Convert a revision to a set of git objects.

        :param rev: Bazaar revision object
        :param tree: Bazaar revision tree
        :param lossy: Whether to drop Bazaar-specific metadata rather than
            roundtripping all Bazaar revision data
        """
        unusual_modes = extract_unusual_modes(rev)
        present_parents = self.repository.has_revisions(rev.parent_ids)
        parent_trees = self.tree_cache.revision_trees(
            [p for p in rev.parent_ids if p in present_parents])
        root_tree = None
        for path, obj, bzr_key_data in _tree_to_objects(tree, parent_trees,
                self._cache.idmap, unusual_modes,
                self.mapping.BZR_DUMMY_FILE, add_cache_entry):
            if path == "":
                root_tree = obj
                root_key_data = bzr_key_data
                # Don't yield just yet
            else:
                yield path, obj
        if root_tree is None:
            # Pointless commit - get the tree sha elsewhere
            if not rev.parent_ids:
                root_tree = Tree()
            else:
                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                root_tree = self[self[base_sha1].tree]
            root_key_data = (tree.get_root_id(), tree.get_revision_id())
        if not lossy and self.mapping.BZR_FILE_IDS_FILE is not None:
            b = self._create_fileid_map_blob(tree)
            if b is not None:
                root_tree[self.mapping.BZR_FILE_IDS_FILE] = (
                    (stat.S_IFREG | 0o644), b.id)
                yield self.mapping.BZR_FILE_IDS_FILE, b
        if add_cache_entry is not None:
            add_cache_entry(root_tree, root_key_data, "")
        yield "", root_tree
        if not lossy:
            testament3 = StrictTestament3(rev, tree)
            verifiers = {"testament3-sha1": testament3.as_sha1()}
        else:
            verifiers = {}
        commit_obj = self._reconstruct_commit(rev, root_tree.id,
            lossy=lossy, verifiers=verifiers)
        try:
            foreign_revid, mapping = mapping_registry.parse_revision_id(
                rev.revision_id)
        except errors.InvalidRevisionId:
            pass
        else:
            _check_expected_sha(foreign_revid, commit_obj)
        if add_cache_entry is not None:
            add_cache_entry(commit_obj, verifiers, None)

        yield None, commit_obj

    def _get_updater(self, rev):
        return self._cache.get_updater(rev)

    def _update_sha_map_revision(self, revid):
        rev = self.repository.get_revision(revid)
        tree = self.tree_cache.revision_tree(rev.revision_id)
        updater = self._get_updater(rev)
        # FIXME JRV 2011-12-15: Shouldn't we try both values for lossy ?
        for path, obj in self._revision_to_objects(
                rev, tree, lossy=(not self.mapping.roundtripping),
                add_cache_entry=updater.add_object):
            if isinstance(obj, Commit):
                commit_obj = obj
        commit_obj = updater.finish()
        return commit_obj.id

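    # Symlinks have no text stored in the Bazaar repository, so a
    # reconstructed blob that is empty and does not match the expected SHA
    # may really be a symlink; it is then rebuilt from the link target in
    # the revision tree.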
    def _reconstruct_blobs(self, keys):
        """Return a Git Blob object from a fileid and revision stored in bzr.

        :param keys: Iterable over (fileid, revision, expected_sha) tuples
        """
        stream = self.repository.iter_files_bytes(
            ((key[0], key[1], key) for key in keys))
        for (file_id, revision, expected_sha), chunks in stream:
            blob = Blob()
            blob.chunked = chunks
            if blob.id != expected_sha and blob.data == "":
                # Perhaps it's a symlink ?
                tree = self.tree_cache.revision_tree(revision)
                path = tree.id2path(file_id)
                if tree.kind(path, file_id) == 'symlink':
                    blob = symlink_to_blob(tree.get_symlink_target(path, file_id))
            _check_expected_sha(expected_sha, blob)
            yield blob

    def _reconstruct_tree(self, fileid, revid, bzr_tree, unusual_modes,
            expected_sha=None):
        """Return a Git Tree object from a file id and a revision stored in bzr.

        :param fileid: fileid in the tree.
        :param revid: Revision of the tree.
        """
        def get_ie_sha1(path, entry):
            if entry.kind == "directory":
                try:
                    return self._cache.idmap.lookup_tree_id(entry.file_id,
                        revid)
                except (NotImplementedError, KeyError):
                    obj = self._reconstruct_tree(entry.file_id, revid, bzr_tree,
                        unusual_modes)
                    if obj is None:
                        return None
                    else:
                        return obj.id
            elif entry.kind in ("file", "symlink"):
                try:
                    return self._cache.idmap.lookup_blob_id(entry.file_id,
                        entry.revision)
                except KeyError:
                    # no-change merge?
                    return next(self._reconstruct_blobs(
                        [(entry.file_id, entry.revision, None)])).id
            elif entry.kind == 'tree-reference':
                # FIXME: Make sure the file id is the root id
                return self._lookup_revision_sha1(entry.reference_revision)
            else:
                raise AssertionError("unknown entry kind '%s'" % entry.kind)
        path = bzr_tree.id2path(fileid)
        tree = directory_to_tree(
            path,
            bzr_tree.iter_child_entries(path),
            get_ie_sha1, unusual_modes, self.mapping.BZR_DUMMY_FILE,
            bzr_tree.get_root_id() == fileid)
        if (bzr_tree.get_root_id() == fileid and
                self.mapping.BZR_FILE_IDS_FILE is not None):
            if tree is None:
                tree = Tree()
            b = self._create_fileid_map_blob(bzr_tree)
            # If this is the root tree, add the file ids
            tree[self.mapping.BZR_FILE_IDS_FILE] = (
                (stat.S_IFREG | 0o644), b.id)
        if tree is not None:
            _check_expected_sha(expected_sha, tree)
        return tree

    def get_parents(self, sha):
        """Retrieve the parents of a Git commit by SHA1.

        :param sha: SHA1 of the commit
        :raises: KeyError, NotCommitError
        """
        return self[sha].parents

    def _lookup_revision_sha1(self, revid):
        """Return the SHA1 matching a Bazaar revision."""
        if revid == NULL_REVISION:
            return ZERO_SHA
        try:
            return self._cache.idmap.lookup_commit(revid)
        except KeyError:
            try:
                return mapping_registry.parse_revision_id(revid)[0]
            except errors.InvalidRevisionId:
                self._update_sha_map(revid)
                return self._cache.idmap.lookup_commit(revid)

    def get_raw(self, sha):
        """Get the raw representation of a Git object by SHA1.

        :param sha: SHA1 of the git object
        """
        if len(sha) == 20:
            sha = sha_to_hex(sha)
        obj = self[sha]
        return (obj.type, obj.as_raw_string())

    def __contains__(self, sha):
        # See if sha is in map
        try:
            for (type, type_data) in self.lookup_git_sha(sha):
                if type == "commit":
                    if self.repository.has_revision(type_data[0]):
                        return True
                elif type == "blob":
                    if type_data in self.repository.texts:
                        return True
                elif type == "tree":
                    if self.repository.has_revision(type_data[1]):
                        return True
                else:
                    raise AssertionError("Unknown object type '%s'" % type)
            return False
        except KeyError:
            return False

    def lock_read(self):
        self._locked = 'r'
        self._map_updated = False
        self.repository.lock_read()
        return LogicalLockResult(self.unlock)

    def lock_write(self):
        self._locked = 'w'
        self._map_updated = False
        self.repository.lock_write()
        return LogicalLockResult(self.unlock)

    def is_locked(self):
        return (self._locked is not None)

    def unlock(self):
        self._locked = None
        self._map_updated = False
        self.repository.unlock()

    def lookup_git_shas(self, shas):
        ret = {}
        for sha in shas:
            if sha == ZERO_SHA:
                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
                continue
            try:
                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
            except KeyError:
                # if not, see if there are any unconverted revisions and
                # add them to the map, search for sha in map again
                self._update_sha_map()
                try:
                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
                except KeyError:
                    pass
        return ret

    def lookup_git_sha(self, sha):
        return self.lookup_git_shas([sha])[sha]

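    # type_data, as stored in the idmap, is (revid, tree_sha, verifiers) for
    # commits, (fileid, revision) for blobs and (fileid, revid) for trees;
    # __getitem__ unpacks it accordingly below.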
    def __getitem__(self, sha):
        for (kind, type_data) in self.lookup_git_sha(sha):
            # convert object to git object
            if kind == "commit":
                (revid, tree_sha, verifiers) = type_data
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    if revid == NULL_REVISION:
                        raise AssertionError(
                            "should not try to look up NULL_REVISION")
                    trace.mutter('entry for %s %s in shamap: %r, but not '
                                 'found in repository', kind, sha, type_data)
                    raise KeyError(sha)
                # FIXME: the type data should say whether conversion was lossless
                commit = self._reconstruct_commit(rev, tree_sha,
                    lossy=(not self.mapping.roundtripping), verifiers=verifiers)
                _check_expected_sha(sha, commit)
                return commit
            elif kind == "blob":
                (fileid, revision) = type_data
                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
                return next(blobs)
            elif kind == "tree":
                (fileid, revid) = type_data
                try:
                    tree = self.tree_cache.revision_tree(revid)
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    trace.mutter('entry for %s %s in shamap: %r, but not found in '
                                 'repository', kind, sha, type_data)
                    raise KeyError(sha)
                unusual_modes = extract_unusual_modes(rev)
                try:
                    return self._reconstruct_tree(fileid, revid,
                        tree, unusual_modes, expected_sha=sha)
                except errors.NoSuchRevision:
                    raise KeyError(sha)
            else:
                raise AssertionError("Unknown object type '%s'" % kind)
        raise KeyError(sha)

    def generate_lossy_pack_data(self, have, want, progress=None,
            get_tagged=None, ofs_delta=False):
        return pack_objects_to_data(
            self.generate_pack_contents(have, want, progress, get_tagged,
                lossy=True))

    def generate_pack_contents(self, have, want, progress=None,
            ofs_delta=False, get_tagged=None, lossy=False):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        """
        processed = set()
        ret = self.lookup_git_shas(have + want)
        for commit_sha in have:
            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    processed.add(type_data[0])
            except KeyError:
                trace.mutter("unable to find remote ref %s", commit_sha)
        pending = set()
        for commit_sha in want:
            if commit_sha in have:
                continue
            try:
                for (type, type_data) in ret[commit_sha]:
                    if type != "commit":
                        raise AssertionError("Type was %s, not commit" % type)
                    pending.add(type_data[0])
            except KeyError:
                pass

        graph = self.repository.get_graph()
        todo = _find_missing_bzr_revids(graph, pending, processed)
        ret = PackTupleIterable(self)
        pb = ui.ui_factory.nested_progress_bar()
        try:
            for i, revid in enumerate(graph.iter_topo_order(todo)):
                pb.update("generating git objects", i, len(todo))
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    continue
                tree = self.tree_cache.revision_tree(revid)
                for path, obj in self._revision_to_objects(
                        rev, tree, lossy=lossy):
                    ret.add(obj.id, path)
            return ret
        finally:
            pb.finished()

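    # add_thin_pack() hands the caller a writable file object plus a commit
    # callback: the caller writes raw pack data to the file and then calls
    # commit(), which indexes the pack and imports its objects into the
    # Bazaar repository.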
    def add_thin_pack(self):
        import tempfile
        import os
        fd, path = tempfile.mkstemp(suffix=".pack")
        f = os.fdopen(fd, 'wb')

        def commit():
            from .fetch import import_git_objects
            os.fsync(fd)
            f.close()
            if os.path.getsize(path) == 0:
                return
            pd = PackData(path)
            pd.create_index_v2(path[:-5] + ".idx", self.object_store.get_raw)

            p = Pack(path[:-5])
            with self.repository.lock_write():
                self.repository.start_write_group()
                try:
                    import_git_objects(self.repository, self.mapping,
                        p.iterobjects(get_raw=self.get_raw),
                        self.object_store)
                except BaseException:
                    self.repository.abort_write_group()
                    raise
                else:
                    self.repository.commit_write_group()
        return f, commit

    # The pack isn't kept around anyway, so no point
    # in treating full packs different from thin packs
    add_pack = add_thin_pack