     :param have: Revisions the target already has
     :return: Set of revisions to fetch
-    pending = want - have
-        processed.update(pending)
-        next_map = get_parent_map(pending)
-        for item in next_map.iteritems():
-            next_pending.update(p for p in item[1] if p not in processed)
-        pending = next_pending
+        extra_todo = graph.find_unique_ancestors(rev, handled)
+        todo.update(extra_todo)
+        handled.update(extra_todo)
     if NULL_REVISION in todo:
         todo.remove(NULL_REVISION)

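The hunk above omits the lines that set up either loop (the old while-pending walk and the new loop over the wanted heads). A minimal sketch of what the new graph-based version computes, assuming bzrlib's Graph.find_unique_ancestors API; graph, want and have stand for the function's arguments, and the surrounding lines are inferred rather than copied from the patch:

    # Hedged sketch, not the patch itself: for each wanted head, ask the
    # revision graph for its ancestors that are not already reachable from
    # what the target has, instead of walking parent maps by hand.
    handled = set(have)              # revisions the target already has
    todo = set()
    for rev in want:                 # heads the target asked for
        extra_todo = graph.find_unique_ancestors(rev, handled)
        todo.update(extra_todo)
        handled.update(extra_todo)
    # NULL_REVISION marks the empty revision and never needs to be sent.
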
             shamap[ie.file_id] = obj.id

+class PackTupleIterable(object):
+    def __init__(self, store):
+    def add(self, sha, path):
+        self.objects[sha] = path
+        return len(self.objects)
+        return ((self.store[object_id], path) for (object_id, path) in
+            self.objects.iteritems())

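Several lines of the new PackTupleIterable are not in the extract (the __init__ body and the def lines for its length and iteration methods). A hedged reconstruction of the shape those surviving lines imply; the attribute assignments and the __len__/__iter__ names are inferred from how they are used, not copied from the patch:

    # Hedged sketch: only the '+' lines in the hunk above come from the
    # patch; the rest is filled in from how they use self.store and
    # self.objects.
    class PackTupleIterable(object):

        def __init__(self, store):
            self.store = store          # object store the SHAs resolve against
            self.objects = {}           # git sha -> path

        def add(self, sha, path):
            self.objects[sha] = path

        def __len__(self):
            return len(self.objects)

        def __iter__(self):
            # Yield (object, path) pairs, looking each object up lazily;
            # this is the shape dulwich's pack writers (e.g.
            # dulwich.pack.write_pack_objects) consume.
            return ((self.store[object_id], path) for (object_id, path) in
                    self.objects.iteritems())

Keeping only the sha-to-path map in memory and resolving objects during iteration avoids materialising every git object before the pack is written.
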
 class BazaarObjectStore(BaseObjectStore):
     """A Git-style object store backed onto a Bazaar repository."""

     def __init__(self, repository, mapping=None):
         self.repository = repository
+        self._map_updated = False
         if mapping is None:
             self.mapping = default_mapping
         else:
             self.mapping = mapping
         self._cache = cache_from_repository(repository)
-        self._content_cache_types = ("tree")
+        self._content_cache_types = ("tree",)
         self.start_write_group = self._cache.idmap.start_write_group
         self.abort_write_group = self._cache.idmap.abort_write_group
         self.commit_write_group = self._cache.idmap.commit_write_group
         self.tree_cache = LRUTreeCache(self.repository)
+        self.unpeel_map = UnpeelMap.from_repository(self.repository)

     def _update_sha_map(self, stop_revision=None):
+        if not self.is_locked():
+            raise AssertionError()
+        if self._map_updated:
+            return
+        if (stop_revision is not None and
+            not self._cache.idmap.missing_revisions([stop_revision])):
+            return
         graph = self.repository.get_graph()
         if stop_revision is None:
             heads = graph.heads(self.repository.all_revision_ids())

         if roundtrip and self.mapping.BZR_FILE_IDS_FILE is not None:
             b = self._create_fileid_map_blob(tree.inventory)
             if b is not None:
-                root_tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
+                root_tree[self.mapping.BZR_FILE_IDS_FILE] = (
+                    (stat.S_IFREG | 0644), b.id)
                 yield self.mapping.BZR_FILE_IDS_FILE, b, None
         yield "", root_tree, root_ie

-        testament3 = StrictTestament3(rev, tree.inventory)
+        if getattr(StrictTestament3, "from_revision_tree", None):
+            testament3 = StrictTestament3(rev, tree)
+        else:
+            testament3 = StrictTestament3(rev, tree.inventory)
         verifiers = { "testament3-sha1": testament3.as_sha1() }

         for path, obj, ie in self._revision_to_objects(rev, tree,
             if isinstance(obj, Commit):
-                testament3 = StrictTestament3(rev, tree.inventory)
+                if getattr(StrictTestament3, "from_revision_tree", None):
+                    testament3 = StrictTestament3(rev, tree)
+                else:
+                    testament3 = StrictTestament3(rev, tree.inventory)
                 ie = { "testament3-sha1": testament3.as_sha1() }
             updater.add_object(obj, ie, path)
         commit_obj = updater.finish()

             self.mapping.BZR_DUMMY_FILE)
         if (inv.root.file_id == fileid and
             self.mapping.BZR_FILE_IDS_FILE is not None):
             b = self._create_fileid_map_blob(inv)
             # If this is the root tree, add the file ids
-            tree[self.mapping.BZR_FILE_IDS_FILE] = ((stat.S_IFREG | 0644), b.id)
-        _check_expected_sha(expected_sha, tree)
+            tree[self.mapping.BZR_FILE_IDS_FILE] = (
+                (stat.S_IFREG | 0644), b.id)
+        _check_expected_sha(expected_sha, tree)

     def get_parents(self, sha):

     def __contains__(self, sha):
         # See if sha is in map
-            (type, type_data) = self.lookup_git_sha(sha)
-                return self.repository.has_revision(type_data[0])
-                return self.repository.texts.has_key(type_data)
-                return self.repository.has_revision(type_data[1])
-                raise AssertionError("Unknown object type '%s'" % type)
+            for (type, type_data) in self.lookup_git_sha(sha):
+                    if self.repository.has_revision(type_data[0]):
+                    if self.repository.texts.has_key(type_data):
+                    if self.repository.has_revision(type_data[1]):
+                    raise AssertionError("Unknown object type '%s'" % type)

+        self._map_updated = False
+        self.repository.lock_read()
+        return LogicalLockResult(self.unlock)
+
+    def lock_write(self):
+        self._map_updated = False
+        self.repository.lock_write()
+        return LogicalLockResult(self.unlock)
+
+        return (self._locked is not None)
+
+        self._map_updated = False
+        self.repository.unlock()

-    def lookup_git_shas(self, shas, update_map=True):
-        from dulwich.protocol import ZERO_SHA
+    def lookup_git_shas(self, shas):
             if sha == ZERO_SHA:
-                ret[sha] = ("commit", (NULL_REVISION, None, {}))
+                ret[sha] = [("commit", (NULL_REVISION, None, {}))]
-                ret[sha] = self._cache.idmap.lookup_git_sha(sha)
+                ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
-                    # if not, see if there are any unconverted revisions and add
-                    # them to the map, search for sha in map again
-                    self._update_sha_map()
-                        ret[sha] = self._cache.idmap.lookup_git_sha(sha)
+                # if not, see if there are any unconverted revisions and
+                # add them to the map, search for sha in map again
+                self._update_sha_map()
+                    ret[sha] = list(self._cache.idmap.lookup_git_sha(sha))
-    def lookup_git_sha(self, sha, update_map=True):
-        return self.lookup_git_shas([sha], update_map=update_map)[sha]
+    def lookup_git_sha(self, sha):
+        return self.lookup_git_shas([sha])[sha]

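The lookup API now hands back a list of entries per SHA instead of a single (type, type_data) tuple, presumably because the same git SHA can be recorded for more than one Bazaar object (for example the same blob contents in different file revisions). A hedged sketch of how a caller consumes the new shape; store stands for a BazaarObjectStore, and the tuple layouts are the ones unpacked in __getitem__ below:

    # Hedged sketch of consuming the list-returning lookup.
    for (kind, type_data) in store.lookup_git_sha(sha):
        if kind == "commit":
            (revid, tree_sha, verifiers) = type_data
        elif kind == "blob":
            (fileid, revision) = type_data
        elif kind == "tree":
            (fileid, revid) = type_data
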
     def __getitem__(self, sha):
         if self._cache.content_cache is not None:
                 return self._cache.content_cache[sha]
-        (type, type_data) = self.lookup_git_sha(sha)
-        # convert object to git object
-            (revid, tree_sha, verifiers) = type_data
-                rev = self.repository.get_revision(revid)
-            except errors.NoSuchRevision:
-                trace.mutter('entry for %s %s in shamap: %r, but not found in '
-                    'repository', type, sha, type_data)
-            commit = self._reconstruct_commit(rev, tree_sha, roundtrip=True,
-            _check_expected_sha(sha, commit)
-            (fileid, revision) = type_data
-            return self._reconstruct_blobs([(fileid, revision, sha)]).next()
-            (fileid, revid) = type_data
-                tree = self.tree_cache.revision_tree(revid)
-                rev = self.repository.get_revision(revid)
-            except errors.NoSuchRevision:
-                trace.mutter('entry for %s %s in shamap: %r, but not found in repository', type, sha, type_data)
-            unusual_modes = extract_unusual_modes(rev)
-                return self._reconstruct_tree(fileid, revid, tree.inventory,
-                    unusual_modes, expected_sha=sha)
-            except errors.NoSuchRevision:
-            raise AssertionError("Unknown object type '%s'" % type)
+        for (kind, type_data) in self.lookup_git_sha(sha):
+            # convert object to git object
+                (revid, tree_sha, verifiers) = type_data
+                    rev = self.repository.get_revision(revid)
+                except errors.NoSuchRevision:
+                    trace.mutter('entry for %s %s in shamap: %r, but not '
+                        'found in repository', kind, sha, type_data)
+                commit = self._reconstruct_commit(rev, tree_sha,
+                    roundtrip=True, verifiers=verifiers)
+                _check_expected_sha(sha, commit)
+                (fileid, revision) = type_data
+                blobs = self._reconstruct_blobs([(fileid, revision, sha)])
+                (fileid, revid) = type_data
+                    tree = self.tree_cache.revision_tree(revid)
+                    rev = self.repository.get_revision(revid)
+                except errors.NoSuchRevision:
+                    trace.mutter('entry for %s %s in shamap: %r, but not found in repository', kind, sha, type_data)
+                unusual_modes = extract_unusual_modes(rev)
+                    return self._reconstruct_tree(fileid, revid,
+                        tree.inventory, unusual_modes, expected_sha=sha)
+                except errors.NoSuchRevision:
+                raise AssertionError("Unknown object type '%s'" % kind)

     def generate_lossy_pack_contents(self, have, want, progress=None,
             get_tagged=None):

         processed = set()
         ret = self.lookup_git_shas(have + want)
         for commit_sha in have:
+            commit_sha = self.unpeel_map.peel_tag(commit_sha, commit_sha)
-                (type, (revid, tree_sha)) = ret[commit_sha]
-            assert type == "commit"
+                for (type, type_data) in ret[commit_sha]:
+                    assert type == "commit"
+                    processed.add(type_data[0])
+                trace.mutter("unable to find remote ref %s", commit_sha)
         for commit_sha in want:
             if commit_sha in have:
-                (type, (revid, tree_sha)) = ret[commit_sha]
-            assert type == "commit"
+                for (type, type_data) in ret[commit_sha]:
+                    assert type == "commit"
+                    pending.add(type_data[0])
-        todo = _find_missing_bzr_revids(self.repository.get_parent_map,
+        graph = self.repository.get_graph()
+        todo = _find_missing_bzr_revids(graph, pending, processed)
         trace.mutter('sending revisions %r', todo)
+        ret = PackTupleIterable(self)
         pb = ui.ui_factory.nested_progress_bar()
             for i, revid in enumerate(todo):
                 pb.update("generating git objects", i, len(todo))
-                rev = self.repository.get_revision(revid)
+                    rev = self.repository.get_revision(revid)
+                except errors.NoSuchRevision:
                 tree = self.tree_cache.revision_tree(revid)
                 for path, obj, ie in self._revision_to_objects(rev, tree,
                     roundtrip=not lossy):
-                    ret.append((obj, path))
+                    ret.add(obj.id, path)

     def add_thin_pack(self):