# Copyright (C) 2008-2010 Jelmer Vernooij <jelmer@samba.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from __future__ import absolute_import

from dulwich.errors import NotCommitError
from dulwich.objects import Commit, Tag, Tree, S_IFGITLINK, S_ISGITLINK, ZERO_SHA
from dulwich.object_store import (
    ObjectStoreGraphWalker,
    tree_lookup_path,
    )
from dulwich.walk import Walker
from itertools import imap

import posixpath
import re
import stat
from ... import debug, errors, osutils, trace, ui
from ...errors import BzrError
from ...bzr.inventory import (
    InventoryDirectory, InventoryFile, InventoryLink, TreeReference)
from ...repository import InterRepository
from ...revision import NULL_REVISION
from ...bzr.inventorytree import InventoryRevisionTree
from ...testament import StrictTestament3
from ...tsort import topo_sort
from ...bzr.versionedfile import ChunkedContentFactory
from .mapping import (
    DEFAULT_FILE_MODE, mode_is_executable, mode_kind, warn_unusual_mode)
from .object_store import BazaarObjectStore, LRUTreeCache, _tree_to_objects
from .refs import is_tag
from .remote import RemoteGitRepository
from .repository import GitRepository, GitRepositoryFormat, LocalGitRepository
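
# The import_git_blob / import_git_tree / import_git_submodule helpers below
# each translate a single git object into a Bazaar inventory delta,
# import_git_commit assembles those deltas into Bazaar revisions, and the
# InterRepository subclasses at the end of the module drive fetching for the
# supported source/target combinations.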

def import_git_blob(texts, mapping, path, name, (base_hexsha, hexsha),
        base_bzr_tree, parent_id, revision_id,
        parent_bzr_trees, lookup_object, (base_mode, mode), store_updater,
        lookup_file_id):
    """Import a git blob object into a bzr repository.

    :param texts: VersionedFiles to add to
    :param path: Path in the tree
    :param blob: A git blob
    :return: Inventory delta for this file
    """
    if mapping.is_special_file(path):
        return []
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return []
    file_id = lookup_file_id(path)
    if stat.S_ISLNK(mode):
        cls = InventoryLink
    else:
        cls = InventoryFile
    ie = cls(file_id, name.decode("utf-8"), parent_id)
    if ie.kind == "file":
        ie.executable = mode_is_executable(mode)
    if base_hexsha == hexsha and mode_kind(base_mode) == mode_kind(mode):
        base_exec = base_bzr_tree.is_executable(path)
        if ie.kind == "symlink":
            ie.symlink_target = base_bzr_tree.get_symlink_target(path)
        else:
            ie.text_size = base_bzr_tree.get_file_size(path)
            ie.text_sha1 = base_bzr_tree.get_file_sha1(path)
        if ie.kind == "symlink" or ie.executable == base_exec:
            ie.revision = base_bzr_tree.get_file_revision(path)
        else:
            blob = lookup_object(hexsha)
    else:
        blob = lookup_object(hexsha)
    if ie.revision is None:
        if ie.kind == "symlink":
            ie.symlink_target = blob.data.decode("utf-8")
        else:
            ie.text_size = sum(imap(len, blob.chunked))
            ie.text_sha1 = osutils.sha_strings(blob.chunked)
    # Check what revision we should store
    parent_keys = []
    for ptree in parent_bzr_trees:
        try:
            ppath = ptree.id2path(file_id)
        except errors.NoSuchId:
            continue
        pkind = ptree.kind(ppath, file_id)
        if (pkind == ie.kind and
            ((pkind == "symlink" and ptree.get_symlink_target(ppath, file_id) == ie.symlink_target) or
             (pkind == "file" and ptree.get_file_sha1(ppath, file_id) == ie.text_sha1 and
              ptree.is_executable(ppath, file_id) == ie.executable))):
            # found a revision in one of the parents to use
            ie.revision = ptree.get_file_revision(ppath, file_id)
            break
        parent_key = (file_id, ptree.get_file_revision(ppath, file_id))
        if not parent_key in parent_keys:
            parent_keys.append(parent_key)
    if ie.revision is None:
        # Need to store a new revision
        ie.revision = revision_id
        assert ie.revision is not None
        if ie.kind == 'symlink':
            chunks = []
        else:
            chunks = blob.chunked
        texts.insert_record_stream([
            ChunkedContentFactory((file_id, ie.revision),
                tuple(parent_keys), ie.text_sha1, chunks)])
    invdelta = []
    if base_hexsha is not None:
        old_path = path.decode("utf-8") # Renames are not supported yet
        if stat.S_ISDIR(base_mode):
            invdelta.extend(remove_disappeared_children(base_bzr_tree, old_path,
                lookup_object(base_hexsha), [], lookup_object))
    else:
        old_path = None
    new_path = path.decode("utf-8")
    invdelta.append((old_path, new_path, file_id, ie))
    if base_hexsha != hexsha:
        store_updater.add_object(blob, (ie.file_id, ie.revision), path)
    return invdelta

class SubmodulesRequireSubtrees(BzrError):
    _fmt = ("The repository you are fetching from contains submodules, "
            "which are not yet supported.")

def import_git_submodule(texts, mapping, path, name, (base_hexsha, hexsha),
        base_bzr_tree, parent_id, revision_id, parent_bzr_trees, lookup_object,
        (base_mode, mode), store_updater, lookup_file_id):
    """Import a git submodule."""
    if base_hexsha == hexsha and base_mode == mode:
        return [], {}
    file_id = lookup_file_id(path)
    invdelta = []
    ie = TreeReference(file_id, name.decode("utf-8"), parent_id)
    ie.revision = revision_id
    if base_hexsha is not None:
        old_path = path.decode("utf-8") # Renames are not supported yet
        if stat.S_ISDIR(base_mode):
            invdelta.extend(remove_disappeared_children(base_bzr_tree, old_path,
                lookup_object(base_hexsha), [], lookup_object))
    else:
        old_path = None
    ie.reference_revision = mapping.revision_id_foreign_to_bzr(hexsha)
    texts.insert_record_stream([
        ChunkedContentFactory((file_id, ie.revision), (), None, [])])
    invdelta.append((old_path, path, file_id, ie))
    return invdelta, {}

def remove_disappeared_children(base_bzr_tree, path, base_tree, existing_children,
        lookup_object):
    """Generate an inventory delta for removed children.

    :param base_bzr_tree: Base bzr tree against which to generate the
        inventory delta.
    :param path: Path to process (unicode)
    :param base_tree: Git Tree base object
    :param existing_children: Children that still exist
    :param lookup_object: Lookup a git object by its SHA1
    :return: Inventory delta, as list
    """
    assert type(path) is unicode
    ret = []
    for name, mode, hexsha in base_tree.iteritems():
        if name in existing_children:
            continue
        c_path = posixpath.join(path, name.decode("utf-8"))
        file_id = base_bzr_tree.path2id(c_path)
        assert file_id is not None
        ret.append((c_path, None, file_id, None))
        if stat.S_ISDIR(mode):
            ret.extend(remove_disappeared_children(
                base_bzr_tree, c_path, lookup_object(hexsha), [], lookup_object))
    return ret
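
# import_git_tree recurses over a git tree and its subtrees, returning both an
# inventory delta and a dictionary mapping paths to file modes that Bazaar
# cannot represent directly (anything other than a plain file, executable
# file, symlink, directory or submodule entry).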

def import_git_tree(texts, mapping, path, name, (base_hexsha, hexsha),
        base_bzr_tree, parent_id, revision_id, parent_bzr_trees,
        lookup_object, (base_mode, mode), store_updater,
        lookup_file_id, allow_submodules=False):
    """Import a git tree object into a bzr repository.

    :param texts: VersionedFiles object to add to
    :param path: Path in the tree (str)
    :param name: Name of the tree (str)
    :param tree: A git tree object
    :param base_bzr_tree: Base inventory against which to return inventory delta
    :return: Inventory delta for this subtree
    """
    assert type(path) is str
    assert type(name) is str
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return [], {}
    invdelta = []
    file_id = lookup_file_id(path)
    # We just have to hope this is indeed utf-8:
    ie = InventoryDirectory(file_id, name.decode("utf-8"), parent_id)
    tree = lookup_object(hexsha)
    if base_hexsha is None:
        base_tree = None
        old_path = None # Newly appeared here
    else:
        base_tree = lookup_object(base_hexsha)
        old_path = path.decode("utf-8") # Renames aren't supported yet
    new_path = path.decode("utf-8")
    if base_tree is None or type(base_tree) is not Tree:
        ie.revision = revision_id
        invdelta.append((old_path, new_path, ie.file_id, ie))
        texts.insert_record_stream([
            ChunkedContentFactory((ie.file_id, ie.revision), (), None, [])])
    # Remember for next time
    existing_children = set()
    child_modes = {}
    for name, child_mode, child_hexsha in tree.iteritems():
        existing_children.add(name)
        child_path = posixpath.join(path, name)
        if type(base_tree) is Tree:
            try:
                child_base_mode, child_base_hexsha = base_tree[name]
            except KeyError:
                child_base_hexsha = None
                child_base_mode = 0
        else:
            child_base_hexsha = None
            child_base_mode = 0
        if stat.S_ISDIR(child_mode):
            subinvdelta, grandchildmodes = import_git_tree(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha),
                base_bzr_tree, file_id, revision_id, parent_bzr_trees,
                lookup_object, (child_base_mode, child_mode), store_updater,
                lookup_file_id, allow_submodules=allow_submodules)
        elif S_ISGITLINK(child_mode): # submodule
            if not allow_submodules:
                raise SubmodulesRequireSubtrees()
            subinvdelta, grandchildmodes = import_git_submodule(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha),
                base_bzr_tree, file_id, revision_id, parent_bzr_trees,
                lookup_object, (child_base_mode, child_mode), store_updater,
                lookup_file_id)
        else:
            if not mapping.is_special_file(name):
                subinvdelta = import_git_blob(texts, mapping, child_path, name,
                    (child_base_hexsha, child_hexsha), base_bzr_tree, file_id,
                    revision_id, parent_bzr_trees, lookup_object,
                    (child_base_mode, child_mode), store_updater, lookup_file_id)
            else:
                subinvdelta = []
            grandchildmodes = {}
        child_modes.update(grandchildmodes)
        invdelta.extend(subinvdelta)
        if child_mode not in (stat.S_IFDIR, DEFAULT_FILE_MODE,
                        stat.S_IFLNK, DEFAULT_FILE_MODE|0111,
                        S_IFGITLINK):
            child_modes[child_path] = child_mode
    # Remove any children that have disappeared
    if base_tree is not None and type(base_tree) is Tree:
        invdelta.extend(remove_disappeared_children(base_bzr_tree, old_path,
            base_tree, existing_children, lookup_object))
    store_updater.add_object(tree, (file_id, ), path)
    return invdelta, child_modes
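
# Only used when the "verify" debug flag is set: re-export the revision that
# was just imported and compare the resulting git objects against the
# originals, walking down differing trees to report the first mismatch.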

def verify_commit_reconstruction(target_git_object_retriever, lookup_object,
        o, rev, ret_tree, parent_trees, mapping, unusual_modes, verifiers):
    new_unusual_modes = mapping.export_unusual_file_modes(rev)
    if new_unusual_modes != unusual_modes:
        raise AssertionError("unusual modes don't match: %r != %r" % (
            unusual_modes, new_unusual_modes))
    # Verify that we can reconstruct the commit properly
    rec_o = target_git_object_retriever._reconstruct_commit(rev, o.tree, True,
        verifiers)
    if rec_o != o:
        raise AssertionError("Reconstructed commit differs: %r != %r" % (
            rec_o, o))
    diff = []
    new_objs = {}
    for path, obj, ie in _tree_to_objects(ret_tree, parent_trees,
            target_git_object_retriever._cache.idmap, unusual_modes,
            mapping.BZR_DUMMY_FILE):
        old_obj_id = tree_lookup_path(lookup_object, o.tree, path)[1]
        new_objs[path] = obj
        if obj.id != old_obj_id:
            diff.append((path, lookup_object(old_obj_id), obj))
    for (path, old_obj, new_obj) in diff:
        while (old_obj.type_name == "tree" and
               new_obj.type_name == "tree" and
               sorted(old_obj) == sorted(new_obj)):
            for name in old_obj:
                if old_obj[name][0] != new_obj[name][0]:
                    raise AssertionError("Modes for %s differ: %o != %o" %
                        (path, old_obj[name][0], new_obj[name][0]))
                if old_obj[name][1] != new_obj[name][1]:
                    # Found a differing child, delve deeper
                    path = posixpath.join(path, name)
                    old_obj = lookup_object(old_obj[name][1])
                    new_obj = new_objs[path]
                    break
        raise AssertionError("objects differ for %s: %r != %r" % (path,
            old_obj, new_obj))

def ensure_inventories_in_repo(repo, trees):
    real_inv_vf = repo.inventories.without_fallbacks()
    for t in trees:
        revid = t.get_revision_id()
        if not real_inv_vf.get_parent_map([(revid, )]):
            repo.add_inventory(revid, t.inventory, t.get_parent_ids())

def import_git_commit(repo, mapping, head, lookup_object,
                      target_git_object_retriever, trees_cache):
    o = lookup_object(head)
    # Note that this uses mapping.revision_id_foreign_to_bzr. If the parents
    # were bzr roundtripped revisions they would be specified in the
    # roundtrip data.
    rev, roundtrip_revid, verifiers = mapping.import_commit(
        o, mapping.revision_id_foreign_to_bzr)
    if roundtrip_revid is not None:
        original_revid = rev.revision_id
        rev.revision_id = roundtrip_revid
    # We have to do this here, since we have to walk the tree and
    # we need to make sure to import the blobs / trees with the right
    # path; this may involve adding them more than once.
    parent_trees = trees_cache.revision_trees(rev.parent_ids)
    ensure_inventories_in_repo(repo, parent_trees)
    if parent_trees == []:
        base_bzr_tree = trees_cache.revision_tree(NULL_REVISION)
        base_tree = None
        base_mode = None
    else:
        base_bzr_tree = parent_trees[0]
        base_tree = lookup_object(o.parents[0]).tree
        base_mode = stat.S_IFDIR
    store_updater = target_git_object_retriever._get_updater(rev)
    tree_supplement = mapping.get_fileid_map(lookup_object, o.tree)
    inv_delta, unusual_modes = import_git_tree(repo.texts,
            mapping, "", "", (base_tree, o.tree), base_bzr_tree,
            None, rev.revision_id, parent_trees,
            lookup_object, (base_mode, stat.S_IFDIR), store_updater,
            tree_supplement.lookup_file_id,
            allow_submodules=getattr(repo._format, "supports_tree_reference",
                False))
    if unusual_modes != {}:
        for path, mode in unusual_modes.iteritems():
            warn_unusual_mode(rev.foreign_revid, path, mode)
        mapping.import_unusual_file_modes(rev, unusual_modes)
    try:
        basis_id = rev.parent_ids[0]
    except IndexError:
        basis_id = NULL_REVISION
        base_bzr_inventory = None
    else:
        try:
            base_bzr_inventory = base_bzr_tree.root_inventory
        except AttributeError: # bzr < 2.6
            base_bzr_inventory = base_bzr_tree.inventory
    rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
        inv_delta, rev.revision_id, rev.parent_ids,
        base_bzr_inventory)
    ret_tree = InventoryRevisionTree(repo, inv, rev.revision_id)
    # Check verifiers
    if verifiers and roundtrip_revid is not None:
        testament = StrictTestament3(rev, ret_tree)
        calculated_verifiers = { "testament3-sha1": testament.as_sha1() }
        if calculated_verifiers != verifiers:
            trace.mutter("Testament SHA1 %r for %r did not match %r.",
                         calculated_verifiers["testament3-sha1"],
                         rev.revision_id, verifiers["testament3-sha1"])
            rev.revision_id = original_revid
            rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
                inv_delta, rev.revision_id, rev.parent_ids, base_bzr_tree)
            ret_tree = InventoryRevisionTree(repo, inv, rev.revision_id)
    else:
        calculated_verifiers = {}
    store_updater.add_object(o, calculated_verifiers, None)
    store_updater.finish()
    trees_cache.add(ret_tree)
    repo.add_revision(rev.revision_id, rev)
    if "verify" in debug.debug_flags:
        verify_commit_reconstruction(target_git_object_retriever,
            lookup_object, o, rev, ret_tree, parent_trees, mapping,
            unusual_modes, verifiers)
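
# Commits are imported in topological order and in batches: each batch is
# wrapped in a write group on both the target repository and the git object
# store cache, so a failure aborts the whole batch rather than leaving a
# partially imported revision behind.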

def import_git_objects(repo, mapping, object_iter,
        target_git_object_retriever, heads, pb=None, limit=None):
    """Import a set of git objects into a bzr repository.

    :param repo: Target Bazaar repository
    :param mapping: Mapping to use
    :param object_iter: Iterator over Git objects.
    :return: Tuple with pack hints and last imported revision id
    """
    def lookup_object(sha):
        try:
            return object_iter[sha]
        except KeyError:
            return target_git_object_retriever[sha]
    graph = []
    checked = set()
    heads = list(set(heads))
    trees_cache = LRUTreeCache(repo)
    # Find and convert commit objects
    while heads:
        if pb is not None:
            pb.update("finding revisions to fetch", len(graph), None)
        head = heads.pop()
        if head == ZERO_SHA:
            continue
        assert isinstance(head, str), "head is %r" % (head,)
        try:
            o = lookup_object(head)
        except KeyError:
            continue
        if isinstance(o, Commit):
            rev, roundtrip_revid, verifiers = mapping.import_commit(o,
                mapping.revision_id_foreign_to_bzr)
            if (repo.has_revision(rev.revision_id) or
                (roundtrip_revid and repo.has_revision(roundtrip_revid))):
                continue
            graph.append((o.id, o.parents))
            heads.extend([p for p in o.parents if p not in checked])
        elif isinstance(o, Tag):
            if o.object[1] not in checked:
                heads.append(o.object[1])
        else:
            trace.warning("Unable to import head object %r" % o)
        checked.add(o.id)
    # Order the revisions
    # Create the inventory objects
    batch_size = 1000  # commits per write group; original value not shown
    revision_ids = topo_sort(graph)
    pack_hints = []
    if limit is not None:
        revision_ids = revision_ids[:limit]
    last_imported = None
    for offset in range(0, len(revision_ids), batch_size):
        target_git_object_retriever.start_write_group()
        try:
            repo.start_write_group()
            try:
                for i, head in enumerate(
                        revision_ids[offset:offset+batch_size]):
                    if pb is not None:
                        pb.update("fetching revisions", offset+i,
                                  len(revision_ids))
                    import_git_commit(repo, mapping, head, lookup_object,
                                      target_git_object_retriever, trees_cache)
                    last_imported = head
            except:
                repo.abort_write_group()
                raise
            else:
                hint = repo.commit_write_group()
                if hint is not None:
                    pack_hints.extend(hint)
        except:
            target_git_object_retriever.abort_write_group()
            raise
        else:
            target_git_object_retriever.commit_write_group()
    return pack_hints, last_imported
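
# The InterRepository implementations below are selected through their
# is_compatible() checks: git -> bzr (remote and local variants) and
# git -> git.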

class InterFromGitRepository(InterRepository):

    _matching_repo_format = GitRepositoryFormat()

    def _target_has_shas(self, shas):
        raise NotImplementedError(self._target_has_shas)

    def get_determine_wants_heads(self, wants, include_tags=False):
        raise NotImplementedError(self.get_determine_wants_heads)

    def determine_wants_all(self, refs):
        raise NotImplementedError(self.determine_wants_all)

    @staticmethod
    def _get_repo_format_to_test():
        return None

    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content."""
        self.fetch(revision_id, find_ghosts=False)

    def search_missing_revision_ids(self,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        if limit is not None:
            raise errors.FetchLimitUnsupported(self)
        git_shas = []
        todo = []
        if revision_ids:
            todo.extend(revision_ids)
        if if_present_ids:
            todo.extend(revision_ids)
        for revid in revision_ids:
            if revid == NULL_REVISION:
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            git_shas.append(git_sha)
        walker = Walker(self.source._git.object_store,
            include=git_shas, exclude=[
                sha for sha in self.target.controldir.get_refs_container().as_dict().values() if sha != ZERO_SHA])
        missing_revids = set()
        for entry in walker:
            missing_revids.add(self.source.lookup_foreign_revision_id(entry.commit.id))
        return self.source.revision_ids_to_search_result(missing_revids)


class InterGitNonGitRepository(InterFromGitRepository):
    """Base InterRepository that copies revisions from a Git into a non-Git
    repository."""

    def _target_has_shas(self, shas):
        revids = {}
        for sha in shas:
            try:
                revid = self.source.lookup_foreign_revision_id(sha)
            except NotCommitError:
                # Commit is definitely not present
                continue
            else:
                revids[revid] = sha
        return set([revids[r] for r in self.target.has_revisions(revids)])

    def determine_wants_all(self, refs):
        potential = set()
        for k, v in refs.as_dict().iteritems():
            # For non-git target repositories, only worry about peeled
            if v == ZERO_SHA:
                continue
            potential.add(self.source.controldir.get_peeled(k))
        return list(potential - self._target_has_shas(potential))

    def get_determine_wants_heads(self, wants, include_tags=False):
        wants = set(wants)
        def determine_wants(refs):
            potential = set(wants)
            if include_tags:
                for k, unpeeled in refs.as_dict().iteritems():
                    if not is_tag(k):
                        continue
                    if unpeeled == ZERO_SHA:
                        continue
                    potential.add(self.source.controldir.get_peeled(k))
            return list(potential - self._target_has_shas(potential))
        return determine_wants

    def get_determine_wants_revids(self, revids, include_tags=False):
        wants = []
        for revid in set(revids):
            if self.target.has_revision(revid):
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            wants.append(git_sha)
        return self.get_determine_wants_heads(wants, include_tags=include_tags)

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """Fetch objects from a remote server.

        :param determine_wants: determine_wants callback
        :param mapping: BzrGitMapping to use
        :param limit: Maximum number of commits to import.
        :return: Tuple with pack hint, last imported revision id and remote refs
        """
        raise NotImplementedError(self.fetch_objects)
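
    # fetch() translates the bzr fetch request (a single revision, a search
    # result from fetch_spec, or "everything") into a determine_wants callback
    # for the git fetch machinery, then hands off to fetch_objects().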

    def fetch(self, revision_id=None, find_ghosts=False,
              mapping=None, fetch_spec=None, include_tags=False):
        if mapping is None:
            mapping = self.source.get_mapping()
        if revision_id is not None:
            interesting_heads = [revision_id]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                interesting_heads = recipe[1]
            else:
                raise AssertionError("Unsupported search result type %s" %
                    recipe[0])
        else:
            interesting_heads = None

        if interesting_heads is not None:
            determine_wants = self.get_determine_wants_revids(
                interesting_heads, include_tags=include_tags)
        else:
            determine_wants = self.determine_wants_all

        (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants,
            mapping)
        if pack_hint is not None and self.target._format.pack_compresses:
            self.target.pack(hint=pack_hint)
        return remote_refs
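

# Lines like "Compressing objects:  50% (5/10)" are emitted by the remote git
# process on the progress sideband; anything that does not match the pattern
# is passed through to the progress bar unchanged.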
_GIT_PROGRESS_RE = re.compile(r"(.*?): +(\d+)% \((\d+)/(\d+)\)")
def report_git_progress(pb, text):
    text = text.rstrip("\r\n")
    g = _GIT_PROGRESS_RE.match(text)
    if g is not None:
        (text, pct, current, total) = g.groups()
        pb.update(text, int(current), int(total))
    else:
        pb.update(text, 0, 0)
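

# Wraps a determine_wants callback so that the refs advertised by the remote
# side and the shas that were actually requested can be inspected after the
# fetch has run.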
class DetermineWantsRecorder(object):

    def __init__(self, actual):
        self.actual = actual
        self.wants = []
        self.remote_refs = {}

    def __call__(self, refs):
        assert isinstance(refs, dict)
        self.remote_refs = refs
        self.wants = self.actual(refs)
        return self.wants


class InterRemoteGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a remote Git into a non-Git
    repository."""

    def get_target_heads(self):
        # FIXME: This should be more efficient
        all_revs = self.target.all_revision_ids()
        parent_map = self.target.get_parent_map(all_revs)
        all_parents = set()
        map(all_parents.update, parent_map.itervalues())
        return set(all_revs) - all_parents

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """See `InterGitNonGitRepository`."""
        store = BazaarObjectStore(self.target, mapping)
        store.lock_write()
        try:
            heads = self.get_target_heads()
            graph_walker = ObjectStoreGraphWalker(
                [store._lookup_revision_sha1(head) for head in heads],
                lambda sha: store[sha].parents)
            wants_recorder = DetermineWantsRecorder(determine_wants)

            pb = ui.ui_factory.nested_progress_bar()
            try:
                objects_iter = self.source.fetch_objects(
                    wants_recorder, graph_walker, store.get_raw,
                    progress=lambda text: report_git_progress(pb, text),)
                trace.mutter("Importing %d new revisions",
                             len(wants_recorder.wants))
                (pack_hint, last_rev) = import_git_objects(self.target,
                    mapping, objects_iter, store, wants_recorder.wants, pb,
                    limit)
                return (pack_hint, last_rev, wants_recorder.remote_refs)
            finally:
                pb.finished()
        finally:
            store.unlock()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, RemoteGitRepository):
            return False
        if not target.supports_rich_root():
            return False
        if isinstance(target, GitRepository):
            return False
        if not getattr(target._format, "supports_full_versioned_files", True):
            return False
        return True


class InterLocalGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a local Git into a non-Git
    repository."""

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """See `InterGitNonGitRepository`."""
        remote_refs = self.source.controldir.get_refs_container()
        wants = determine_wants(remote_refs)
        pb = ui.ui_factory.nested_progress_bar()
        target_git_object_retriever = BazaarObjectStore(self.target, mapping)
        try:
            target_git_object_retriever.lock_write()
            try:
                (pack_hint, last_rev) = import_git_objects(self.target,
                    mapping, self.source._git.object_store,
                    target_git_object_retriever, wants, pb, limit)
                return (pack_hint, last_rev, remote_refs)
            finally:
                target_git_object_retriever.unlock()
        finally:
            pb.finished()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, LocalGitRepository):
            return False
        if not target.supports_rich_root():
            return False
        if isinstance(target, GitRepository):
            return False
        if not getattr(target._format, "supports_full_versioned_files", True):
            return False
        return True


class InterGitGitRepository(InterFromGitRepository):
    """InterRepository that copies between Git repositories."""

    def fetch_refs(self, update_refs, lossy=False):
        if lossy:
            raise errors.LossyPushToSameVCS(self.source, self.target)
        old_refs = self.target.controldir.get_refs_container()
        ref_changes = {}
        def determine_wants(heads):
            old_refs = dict([(k, (v, None)) for (k, v) in heads.as_dict().iteritems()])
            new_refs = update_refs(old_refs)
            ref_changes.update(new_refs)
            return [sha1 for (sha1, bzr_revid) in new_refs.itervalues()]
        self.fetch_objects(determine_wants, lossy=lossy)
        for k, (git_sha, bzr_revid) in ref_changes.iteritems():
            self.target._git.refs[k] = git_sha
        new_refs = self.target.controldir.get_refs_container()
        return None, old_refs, new_refs
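
    # Three combinations are handled: local -> local delegates to dulwich's
    # repo-to-repo fetch, remote -> local streams a pack into the target
    # object store, and local -> remote is not implemented.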

    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
        if lossy:
            raise errors.LossyPushToSameVCS(self.source, self.target)
        if limit is not None:
            raise errors.FetchLimitUnsupported(self)
        graphwalker = self.target._git.get_graph_walker()
        if (isinstance(self.source, LocalGitRepository) and
            isinstance(self.target, LocalGitRepository)):
            def wrap_determine_wants(refs):
                return determine_wants(self.source._git.refs.as_dict())
            pb = ui.ui_factory.nested_progress_bar()
            try:
                refs = self.source._git.fetch(self.target._git, wrap_determine_wants,
                    lambda text: report_git_progress(pb, text))
            finally:
                pb.finished()
            return (None, None, refs)
        elif (isinstance(self.source, LocalGitRepository) and
              isinstance(self.target, RemoteGitRepository)):
            raise NotImplementedError
        elif (isinstance(self.source, RemoteGitRepository) and
              isinstance(self.target, LocalGitRepository)):
            pb = ui.ui_factory.nested_progress_bar()
            try:
                f, commit, abort = self.target._git.object_store.add_pack()
                try:
                    refs = self.source.controldir.fetch_pack(
                        determine_wants, graphwalker, f.write,
                        lambda text: report_git_progress(pb, text))
                    commit()
                    return (None, None, refs)
                except BaseException:
                    abort()
                    raise
            finally:
                pb.finished()
        else:
            raise AssertionError("fetching between %r and %r not supported" %
                (self.source, self.target))

    def _target_has_shas(self, shas):
        return set([sha for sha in shas if sha in self.target._git.object_store])

    def fetch(self, revision_id=None, find_ghosts=False,
              mapping=None, fetch_spec=None, branches=None, limit=None, include_tags=False):
        if mapping is None:
            mapping = self.source.get_mapping()
        if revision_id is not None:
            args = [revision_id]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                args = recipe[1]
            else:
                raise AssertionError(
                    "Unsupported search result type %s" % recipe[0])

        if branches is not None:
            def determine_wants(refs):
                ret = []
                for name, value in refs.iteritems():
                    if value == ZERO_SHA:
                        continue
                    if name in branches or (include_tags and is_tag(name)):
                        ret.append(value)
                return ret
        elif fetch_spec is None and revision_id is None:
            determine_wants = self.determine_wants_all
        else:
            determine_wants = self.get_determine_wants_revids(args, include_tags=include_tags)
        wants_recorder = DetermineWantsRecorder(determine_wants)
        self.fetch_objects(wants_recorder, mapping, limit=limit)
        return wants_recorder.remote_refs

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        return (isinstance(source, GitRepository) and
                isinstance(target, GitRepository))

    def get_determine_wants_revids(self, revids, include_tags=False):
        wants = []
        for revid in set(revids):
            if self.target.has_revision(revid):
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            wants.append(git_sha)
        return self.get_determine_wants_heads(wants,
            include_tags=include_tags)

    def determine_wants_all(self, refs):
        potential = set([v for v in refs.values() if not v == ZERO_SHA])
        return list(potential - self._target_has_shas(potential))

    def get_determine_wants_heads(self, wants, include_tags=False):
        wants = set(wants)
        def determine_wants(refs):
            potential = set(wants)
            if include_tags:
                for k, unpeeled in refs.iteritems():
                    if not is_tag(k):
                        continue
                    if unpeeled == ZERO_SHA:
                        continue
                    potential.add(unpeeled)
            return list(potential - self._target_has_shas(potential))
        return determine_wants