# Copyright (C) 2009 Jelmer Vernooij <jelmer@samba.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

"""Map from Git SHAs to Bazaar objects."""

from dulwich.objects import (
    Blob,
    Commit,
    Tree,
    sha_to_hex,
    ZERO_SHA,
    )
from dulwich.object_store import (
    BaseObjectStore,
    )

from bzrlib import (
    errors,
    lru_cache,
    trace,
    ui,
    urlutils,
    )
from bzrlib.lock import LogicalLockResult
from bzrlib.revision import (
    NULL_REVISION,
    )
from bzrlib.testament import (
    StrictTestament3,
    )

from bzrlib.plugins.git.cache import (
    from_repository as cache_from_repository,
    )
from bzrlib.plugins.git.mapping import (
    default_mapping,
    directory_to_tree,
    extract_unusual_modes,
    mapping_registry,
    symlink_to_blob,
    )
from bzrlib.plugins.git.unpeel_map import (
    UnpeelMap,
    )

import posixpath
import stat


def get_object_store(repo, mapping=None):
    git = getattr(repo, "_git", None)
    if git is not None:
        # A native Git repository has its own object store; give it no-op
        # lock methods so it can be used like a BazaarObjectStore.
        git.object_store.unlock = lambda: None
        git.object_store.lock_read = lambda: LogicalLockResult(lambda: None)
        git.object_store.lock_write = lambda: LogicalLockResult(lambda: None)
        return git.object_store
    return BazaarObjectStore(repo, mapping)
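
# Usage sketch (hypothetical names, not part of this module): given a
# Bazaar branch, the returned store addresses its history as git objects:
#
#   from bzrlib.branch import Branch
#   branch = Branch.open("path/to/branch")
#   store = get_object_store(branch.repository)
#   store.lock_read()
#   try:
#       commit_sha = store._lookup_revision_sha1(branch.last_revision())
#   finally:
#       store.unlock()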


MAX_TREE_CACHE_SIZE = 50 * 1024 * 1024


class LRUTreeCache(object):

    def __init__(self, repository):
        def approx_tree_size(tree):
            # Very rough estimate, 1k per inventory entry
            return len(tree.inventory) * 1024
        self.repository = repository
        self._cache = lru_cache.LRUSizeCache(max_size=MAX_TREE_CACHE_SIZE,
            after_cleanup_size=None, compute_size=approx_tree_size)

    def revision_tree(self, revid):
        try:
            tree = self._cache[revid]
        except KeyError:
            tree = self.repository.revision_tree(revid)
            self.add(tree)
        assert tree.get_revision_id() == tree.inventory.revision_id
        return tree

    def iter_revision_trees(self, revids):
        trees = {}
        todo = []
        for revid in revids:
            try:
                tree = self._cache[revid]
            except KeyError:
                todo.append(revid)
            else:
                assert tree.get_revision_id() == revid
                trees[revid] = tree
        for tree in self.repository.revision_trees(todo):
            trees[tree.get_revision_id()] = tree
            self.add(tree)
        return (trees[r] for r in revids)

    def revision_trees(self, revids):
        return list(self.iter_revision_trees(revids))

    def add(self, tree):
        self._cache[tree.get_revision_id()] = tree
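
# The cache is size-bounded: LRUSizeCache evicts least-recently-used trees
# once the estimated total exceeds MAX_TREE_CACHE_SIZE (50 MB). A sketch of
# the intended access pattern (names hypothetical):
#
#   cache = LRUTreeCache(repository)
#   tree = cache.revision_tree(revid)          # loads and caches
#   same_tree = cache.revision_tree(revid)     # served from the cache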


def _find_missing_bzr_revids(graph, want, have):
    """Find the revisions that have to be pushed.

    :param graph: Repository graph to search for ancestors
    :param want: Revisions the target wants
    :param have: Revisions the target already has
    :return: Set of revisions to fetch
    """
    handled = set(have)
    todo = set()
    for rev in want:
        extra_todo = graph.find_unique_ancestors(rev, handled)
        todo.update(extra_todo)
        handled.update(extra_todo)
    if NULL_REVISION in todo:
        todo.remove(NULL_REVISION)
    return todo
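
# For example (hypothetical revision ids): in a linear history A -> B -> C,
# _find_missing_bzr_revids(graph, want={C}, have={A}) returns {B, C} --
# the ancestors of C that are not already reachable from A.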


def _check_expected_sha(expected_sha, object):
    """Check whether an object matches an expected SHA.

    :param expected_sha: None, or the expected SHA as either a binary or a
        hex digest
    :param object: Object to verify
    """
    if expected_sha is None:
        return
    if len(expected_sha) == 40:
        if expected_sha != object.sha().hexdigest():
            raise AssertionError("Invalid sha for %r: %s" % (object,
                expected_sha))
    elif len(expected_sha) == 20:
        if expected_sha != object.sha().digest():
            raise AssertionError("Invalid sha for %r: %s" % (object,
                sha_to_hex(expected_sha)))
    else:
        raise AssertionError("Unknown length %d for %r" % (len(expected_sha),
            expected_sha))


def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
                     dummy_file_name=None):
    """Iterate over the objects that were introduced in a revision.

    :param tree: Revision tree to extract objects from
    :param parent_trees: Parent revision trees
    :param idmap: id map
    :param unusual_modes: Unusual file modes dictionary
    :param dummy_file_name: File name to use for dummy files
        in empty directories. None to skip empty directories
    :return: Yields (path, object, ie) entries
    """
    new_trees = {}
    new_blobs = []
    shamap = {}
    try:
        base_tree = parent_trees[0]
        other_parent_trees = parent_trees[1:]
    except IndexError:
        base_tree = tree._repository.revision_tree(NULL_REVISION)
        other_parent_trees = []

    def find_unchanged_parent_ie(ie, parent_trees):
        assert ie.kind in ("symlink", "file")
        for ptree in parent_trees:
            try:
                pie = ptree.inventory[ie.file_id]
            except errors.NoSuchId:
                pass
            else:
                if (pie.text_sha1 == ie.text_sha1 and
                    pie.kind == ie.kind and
                    pie.symlink_target == ie.symlink_target):
                    return pie
        raise KeyError(ie.file_id)

    # Find all the changed blobs
    for (file_id, path, changed_content, versioned, parent, name, kind,
         executable) in tree.iter_changes(base_tree):
        if kind[1] == "file":
            ie = tree.inventory[file_id]
            if changed_content:
                try:
                    pie = find_unchanged_parent_ie(ie, other_parent_trees)
                except KeyError:
                    pass
                else:
                    try:
                        shamap[ie.file_id] = idmap.lookup_blob_id(
                            pie.file_id, pie.revision)
                    except KeyError:
                        # no-change merge?
                        blob = Blob()
                        blob.data = tree.get_file_text(ie.file_id)
                        shamap[ie.file_id] = blob.id
            if file_id not in shamap:
                new_blobs.append((path[1], ie))
            new_trees[posixpath.dirname(path[1])] = parent[1]
        elif kind[1] == "symlink":
            ie = tree.inventory[file_id]
            if changed_content:
                blob = symlink_to_blob(ie)
                shamap[file_id] = blob.id
                try:
                    find_unchanged_parent_ie(ie, other_parent_trees)
                except KeyError:
                    yield path[1], blob, ie
            new_trees[posixpath.dirname(path[1])] = parent[1]
        elif kind[1] not in (None, "directory"):
            raise AssertionError(kind[1])
        if (path[0] not in (None, "") and
            tree.has_id(parent[0]) and
            tree.inventory[parent[0]].kind == "directory"):
            # Removal: the old path's parent directory changed too
            new_trees[posixpath.dirname(path[0])] = parent[0]

    # Fetch contents of the blobs that were changed
    for (path, ie), chunks in tree.iter_files_bytes(
        [(ie.file_id, (path, ie)) for (path, ie) in new_blobs]):
        obj = Blob()
        obj.chunked = chunks
        yield path, obj, ie
        shamap[ie.file_id] = obj.id

    for path in unusual_modes:
        parent_path = posixpath.dirname(path)
        new_trees[parent_path] = tree.path2id(parent_path)

    # Propagate changed trees upwards until the root is reached
    trees = {}
    while new_trees:
        items = new_trees.items()
        new_trees = {}
        for path, file_id in items:
            parent_id = tree.inventory[file_id].parent_id
            if parent_id is not None:
                parent_path = urlutils.dirname(path)
                new_trees[parent_path] = parent_id
            trees[path] = file_id

    def ie_to_hexsha(ie):
        try:
            return shamap[ie.file_id]
        except KeyError:
            # FIXME: Should be the same as in parent
            if ie.kind in ("file", "symlink"):
                try:
                    return idmap.lookup_blob_id(ie.file_id, ie.revision)
                except KeyError:
                    # no-change merge?
                    blob = Blob()
                    blob.data = tree.get_file_text(ie.file_id)
                    return blob.id
            elif ie.kind == "directory":
                # Not all cache backends store the tree information,
                # calculate again from scratch
                ret = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
                    dummy_file_name)
                if ret is None:
                    return ret
                return ret.id
            else:
                raise AssertionError("unknown entry kind '%s'" % ie.kind)

    # Yield the new trees, deepest paths first, so children exist before
    # their parent trees are built
    for path in sorted(trees.keys(), reverse=True):
        ie = tree.inventory[trees[path]]
        assert ie.kind == "directory"
        obj = directory_to_tree(ie, ie_to_hexsha, unusual_modes,
            dummy_file_name)
        if obj is not None:
            yield path, obj, ie
            shamap[ie.file_id] = obj.id


class PackTupleIterable(object):
    """Iterable over the (object, path) tuples collected for a pack."""

    def __init__(self, store):
        self.store = store
        self.store.lock_read()
        self.objects = {}

    def __del__(self):
        self.store.unlock()

    def add(self, sha, path):
        self.objects[sha] = path

    def __len__(self):
        return len(self.objects)

    def __iter__(self):
        return ((self.store[object_id], path) for (object_id, path) in
                self.objects.iteritems())
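
# A sketch of how this container is meant to be consumed (hypothetical;
# dulwich's pack writer accepts an iterable of (object, path) pairs that
# also supports len()):
#
#   pack = PackTupleIterable(store)
#   pack.add(sha, path)
#   from dulwich.pack import write_pack_objects
#   write_pack_objects(outf, pack)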


class BazaarObjectStore(BaseObjectStore):
    """A Git-style object store backed onto a Bazaar repository."""

    def __init__(self, repository, mapping=None):
        self.repository = repository
        self._map_updated = False
        self._locked = None
        if mapping is None:
            self.mapping = default_mapping
        else:
            self.mapping = mapping
        self._cache = cache_from_repository(repository)
        self._content_cache_types = ("tree",)
        self.start_write_group = self._cache.idmap.start_write_group
        self.abort_write_group = self._cache.idmap.abort_write_group
        self.commit_write_group = self._cache.idmap.commit_write_group
        self.tree_cache = LRUTreeCache(self.repository)
        self.unpeel_map = UnpeelMap.from_repository(self.repository)

    def _missing_revisions(self, revisions):
        return self._cache.idmap.missing_revisions(revisions)

    def _update_sha_map(self, stop_revision=None):
        if not self.is_locked():
            raise AssertionError()
        if self._map_updated:
            return
        if (stop_revision is not None and
            not self._missing_revisions([stop_revision])):
            return
        graph = self.repository.get_graph()
        if stop_revision is None:
            all_revids = self.repository.all_revision_ids()
            missing_revids = self._missing_revisions(all_revids)
        else:
            heads = set([stop_revision])
            missing_revids = self._missing_revisions(heads)
            while heads:
                parents = graph.get_parent_map(heads)
                todo = set()
                for p in parents.values():
                    todo.update([x for x in p if x not in missing_revids])
                heads = self._missing_revisions(todo)
                missing_revids.update(heads)
        if NULL_REVISION in missing_revids:
            missing_revids.remove(NULL_REVISION)
        missing_revids = self.repository.has_revisions(missing_revids)
        if not missing_revids:
            if stop_revision is None:
                self._map_updated = True
            return
        self.start_write_group()
        try:
            pb = ui.ui_factory.nested_progress_bar()
            try:
                for i, revid in enumerate(graph.iter_topo_order(missing_revids)):
                    trace.mutter('processing %r', revid)
                    pb.update("updating git map", i, len(missing_revids))
                    self._update_sha_map_revision(revid)
            finally:
                pb.finished()
            if stop_revision is None:
                self._map_updated = True
        except:
            self.abort_write_group()
            raise
        else:
            self.commit_write_group()

    def __iter__(self):
        self._update_sha_map()
        return iter(self._cache.idmap.sha1s())

    def _reconstruct_commit(self, rev, tree_sha, roundtrip, verifiers):
        """Reconstruct a Commit object.

        :param rev: Revision object
        :param tree_sha: SHA1 of the root tree object
        :param roundtrip: Whether or not to roundtrip bzr metadata
        :param verifiers: Verifiers for the commit
        :return: Commit object
        """
        def parent_lookup(revid):
            try:
                return self._lookup_revision_sha1(revid)
            except errors.NoSuchRevision:
                return None
        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
            roundtrip, verifiers)

    def _create_fileid_map_blob(self, inv):
        # FIXME: This can probably be a lot more efficient,
        # not all files necessarily have to be processed.
        file_ids = {}
        for (path, ie) in inv.iter_entries():
            if self.mapping.generate_file_id(path) != ie.file_id:
                file_ids[path] = ie.file_id
        return self.mapping.export_fileid_map(file_ids)

    def _revision_to_objects(self, rev, tree, roundtrip):
        """Convert a revision to a set of git objects.

        :param rev: Bazaar revision object
        :param tree: Bazaar revision tree
        :param roundtrip: Whether to roundtrip all Bazaar revision data
        """
        unusual_modes = extract_unusual_modes(rev)
        present_parents = self.repository.has_revisions(rev.parent_ids)
        parent_trees = self.tree_cache.revision_trees(
            [p for p in rev.parent_ids if p in present_parents])
        root_tree = None
        for path, obj, ie in _tree_to_objects(tree, parent_trees,
                self._cache.idmap, unusual_modes, self.mapping.BZR_DUMMY_FILE):
            if path == "":
                root_tree = obj
                root_ie = ie
                # Don't yield just yet
            else:
                yield path, obj, ie
        if root_tree is None:
            # Pointless commit - get the tree sha elsewhere
            if not rev.parent_ids:
                root_tree = Tree()
            else:
                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                root_tree = self[self[base_sha1].tree]
            root_ie = tree.inventory.root
        if roundtrip and self.mapping.BZR_FILE_IDS_FILE is not None:
            b = self._create_fileid_map_blob(tree.inventory)
            if b is not None:
                root_tree[self.mapping.BZR_FILE_IDS_FILE] = (
                    (stat.S_IFREG | 0644), b.id)
                yield self.mapping.BZR_FILE_IDS_FILE, b, None
        yield "", root_tree, root_ie
        if roundtrip:
            if getattr(StrictTestament3, "from_revision_tree", None):
                testament3 = StrictTestament3(rev, tree)
            else:
                testament3 = StrictTestament3(rev, tree.inventory)
            verifiers = { "testament3-sha1": testament3.as_sha1() }
        else:
            verifiers = {}
        commit_obj = self._reconstruct_commit(rev, root_tree.id,
            roundtrip=roundtrip, verifiers=verifiers)
        try:
            foreign_revid, mapping = mapping_registry.parse_revision_id(
                rev.revision_id)
        except errors.InvalidRevisionId:
            pass
        else:
            _check_expected_sha(foreign_revid, commit_obj)
        yield None, commit_obj, None

    def _get_updater(self, rev):
        return self._cache.get_updater(rev)

    def _update_sha_map_revision(self, revid):
        rev = self.repository.get_revision(revid)
        tree = self.tree_cache.revision_tree(rev.revision_id)
        updater = self._get_updater(rev)
        for path, obj, ie in self._revision_to_objects(rev, tree,
                roundtrip=True):
            if isinstance(obj, Commit):
                if getattr(StrictTestament3, "from_revision_tree", None):
                    testament3 = StrictTestament3(rev, tree)
                else:
                    testament3 = StrictTestament3(rev, tree.inventory)
                ie = { "testament3-sha1": testament3.as_sha1() }
            updater.add_object(obj, ie, path)
        commit_obj = updater.finish()
        return commit_obj.id

    def _reconstruct_blobs(self, keys):
        """Yield Git Blob objects from file ids and revisions stored in bzr.

        :param keys: Iterable over (fileid, revision, expected_sha) tuples
        """
        stream = self.repository.iter_files_bytes(
            ((key[0], key[1], key) for key in keys))
        for (fileid, revision, expected_sha), chunks in stream:
            blob = Blob()
            blob.chunked = chunks
            if blob.id != expected_sha and blob.data == "":
                # Perhaps it's a symlink ?
                tree = self.tree_cache.revision_tree(revision)
                entry = tree.inventory[fileid]
                if entry.kind == 'symlink':
                    blob = symlink_to_blob(entry)
            _check_expected_sha(expected_sha, blob)
            yield blob

    def _reconstruct_tree(self, fileid, revid, inv, unusual_modes,
            expected_sha=None):
        """Return a Git Tree object from a file id and a revision stored in bzr.

        :param fileid: fileid in the tree.
        :param revid: Revision of the tree.
        """
        def get_ie_sha1(entry):
            if entry.kind == "directory":
                try:
                    return self._cache.idmap.lookup_tree_id(entry.file_id,
                        revid)
                except (NotImplementedError, KeyError):
                    obj = self._reconstruct_tree(entry.file_id, revid, inv,
                        unusual_modes)
                    if obj is None:
                        return None
                    return obj.id
            elif entry.kind in ("file", "symlink"):
                try:
                    return self._cache.idmap.lookup_blob_id(entry.file_id,
                        entry.revision)
                except KeyError:
                    # no-change merge?
                    return self._reconstruct_blobs(
                        [(entry.file_id, entry.revision, None)]).next().id
            else:
                raise AssertionError("unknown entry kind '%s'" % entry.kind)
        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes,
            self.mapping.BZR_DUMMY_FILE)
        if (inv.root.file_id == fileid and
            self.mapping.BZR_FILE_IDS_FILE is not None):
            if tree is None:
                tree = Tree()
            b = self._create_fileid_map_blob(inv)
            # If this is the root tree, add the file ids
            tree[self.mapping.BZR_FILE_IDS_FILE] = (
                (stat.S_IFREG | 0644), b.id)
        if tree is not None:
            _check_expected_sha(expected_sha, tree)
        return tree

    def get_parents(self, sha):
        """Retrieve the parents of a Git commit by SHA1.

        :param sha: SHA1 of the commit
        :raises: KeyError, NotCommitError
        """
        return self[sha].parents

    def _lookup_revision_sha1(self, revid):
        """Return the SHA1 matching a Bazaar revision."""
        if revid == NULL_REVISION:
            return ZERO_SHA
        try:
            return self._cache.idmap.lookup_commit(revid)
        except KeyError:
            try:
                return mapping_registry.parse_revision_id(revid)[0]
            except errors.InvalidRevisionId:
                self._update_sha_map(revid)
                return self._cache.idmap.lookup_commit(revid)

    def get_raw(self, sha):
        """Get the raw representation of a Git object by SHA1.

        :param sha: SHA1 of the git object
        """
        obj = self[sha]
        return (obj.type, obj.as_raw_string())

    def __contains__(self, sha):
        # See if sha is in map
        try:
            for (type, type_data) in self.lookup_git_sha(sha):
                if type == "commit":
                    if self.repository.has_revision(type_data[0]):
                        return True
                elif type == "blob":
                    if self.repository.texts.has_key(type_data):
                        return True
                elif type == "tree":
                    if self.repository.has_revision(type_data[1]):
                        return True
                else:
                    raise AssertionError("Unknown object type '%s'" % type)
            return False
        except KeyError:
            return False

    def lock_read(self):
        self._locked = 'r'
        self._map_updated = False
        self.repository.lock_read()
        return LogicalLockResult(self.unlock)

    def lock_write(self):
        self._locked = 'w'
        self._map_updated = False
        self.repository.lock_write()
        return LogicalLockResult(self.unlock)

    def is_locked(self):
        return (self._locked is not None)

    def unlock(self):
        self._locked = None
        self._map_updated = False
        self.repository.unlock()

    def lookup_git_shas(self, shas):
        ret = {}
        for sha in shas:
            if sha == ZERO_SHA:
                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
                continue
            try:
                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
            except KeyError:
                # if not, see if there are any unconverted revisions and
                # add them to the map, search for sha in map again
                self._update_sha_map()
                try:
                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
                except KeyError:
                    pass
        return ret

    def lookup_git_sha(self, sha):
        return self.lookup_git_shas([sha])[sha]

    def __getitem__(self, sha):
        if self._cache.content_cache is not None:
            try:
                return self._cache.content_cache[sha]
            except KeyError:
                pass
        for (kind, type_data) in self.lookup_git_sha(sha):
            # convert object to git object
            if kind == "commit":
                (revid, tree_sha, verifiers) = type_data
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    if revid == NULL_REVISION:
                        raise AssertionError(
                            "should not try to look up NULL_REVISION")
                    trace.mutter('entry for %s %s in shamap: %r, but not '
                                 'found in repository', kind, sha, type_data)
                    raise KeyError(sha)
                commit = self._reconstruct_commit(rev, tree_sha,
                    roundtrip=True, verifiers=verifiers)
                _check_expected_sha(sha, commit)
                return commit
            elif kind == "blob":
                (fileid, revision) = type_data
                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
                return blobs.next()
            elif kind == "tree":
                (fileid, revid) = type_data
                try:
                    tree = self.tree_cache.revision_tree(revid)
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    trace.mutter('entry for %s %s in shamap: %r, but not '
                                 'found in repository', kind, sha, type_data)
                    raise KeyError(sha)
                unusual_modes = extract_unusual_modes(rev)
                try:
                    return self._reconstruct_tree(fileid, revid,
                        tree.inventory, unusual_modes, expected_sha=sha)
                except errors.NoSuchRevision:
                    raise KeyError(sha)
            else:
                raise AssertionError("Unknown object type '%s'" % kind)
        raise KeyError(sha)

    def generate_lossy_pack_contents(self, have, want, progress=None,
            get_tagged=None):
        return self.generate_pack_contents(have, want, progress, get_tagged,
            lossy=True)

    def generate_pack_contents(self, have, want, progress=None,
            get_tagged=None, lossy=False):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        """
        processed = set()
        ret = self.lookup_git_shas(have + want)
        for commit_sha in have:
            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
            try:
                for (type, type_data) in ret[commit_sha]:
                    assert type == "commit"
                    processed.add(type_data[0])
            except KeyError:
                trace.mutter("unable to find remote ref %s", commit_sha)
        pending = set()
        for commit_sha in want:
            if commit_sha in have:
                continue
            try:
                for (type, type_data) in ret[commit_sha]:
                    assert type == "commit"
                    pending.add(type_data[0])
            except KeyError:
                pass

        graph = self.repository.get_graph()
        todo = _find_missing_bzr_revids(graph, pending, processed)
        ret = PackTupleIterable(self)
        pb = ui.ui_factory.nested_progress_bar()
        try:
            for i, revid in enumerate(todo):
                pb.update("generating git objects", i, len(todo))
                try:
                    rev = self.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    continue
                tree = self.tree_cache.revision_tree(revid)
                for path, obj, ie in self._revision_to_objects(rev, tree,
                        roundtrip=not lossy):
                    ret.add(obj.id, path)
            return ret
        finally:
            pb.finished()

    def add_thin_pack(self):
        import tempfile
        import os
        fd, path = tempfile.mkstemp(suffix=".pack")
        f = os.fdopen(fd, 'wb')
        def commit():
            from dulwich.pack import PackData, Pack
            from bzrlib.plugins.git.fetch import import_git_objects
            os.fsync(fd)
            f.close()
            if os.path.getsize(path) == 0:
                return
            pd = PackData(path)
            pd.create_index_v2(path[:-5]+".idx", self.object_store.get_raw)
            p = Pack(path[:-5])
            self.repository.lock_write()
            try:
                self.repository.start_write_group()
                try:
                    import_git_objects(self.repository, self.mapping,
                        p.iterobjects(get_raw=self.get_raw),
                        self.object_store)
                except:
                    self.repository.abort_write_group()
                    raise
                else:
                    self.repository.commit_write_group()
            finally:
                self.repository.unlock()
        return f, commit

    # The pack isn't kept around anyway, so no point
    # in treating full packs different from thin packs
    add_pack = add_thin_pack
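
# End-to-end sketch of pack generation (hypothetical names): given SHAs the
# remote side already has and SHAs it wants, the store yields (object, path)
# tuples suitable for writing a pack:
#
#   store = get_object_store(branch.repository)
#   store.lock_read()
#   try:
#       objects = store.generate_pack_contents(have=[old_sha], want=[new_sha])
#       for obj, path in objects:
#           ...  # hand off to a pack writer
#   finally:
#       store.unlock()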