# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Fetching from git into bzr."""

from __future__ import absolute_import
from dulwich.errors import (
    NotCommitError,
    )
from dulwich.objects import (
    Commit,
    S_IFGITLINK,
    S_ISGITLINK,
    Tag,
    Tree,
    ZERO_SHA,
    )
from dulwich.object_store import (
    ObjectStoreGraphWalker,
    tree_lookup_path,
    )
from dulwich.walk import Walker
from itertools import (
    imap,
    )
import posixpath
import re
import stat

from ... import (
    debug,
    errors,
    osutils,
    trace,
    ui,
    )
from ...errors import (
    BzrError,
    )
from ...bzr.inventory import (
    InventoryDirectory,
    InventoryFile,
    InventoryLink,
    TreeReference,
    )
from ...repository import (
    InterRepository,
    )
from ...revision import (
    NULL_REVISION,
    )
from ...bzr.inventorytree import InventoryRevisionTree
from ...testament import (
    StrictTestament3,
    )
from ...tsort import (
    topo_sort,
    )
from ...bzr.versionedfile import (
    ChunkedContentFactory,
    )

from .mapping import (
    DEFAULT_FILE_MODE,
    mode_is_executable,
    mode_kind,
    warn_unusual_mode,
    )
from .object_store import (
    BazaarObjectStore,
    LRUTreeCache,
    _tree_to_objects,
    )
from .refs import (
    is_tag,
    )
from .repository import (
    GitRepository,
    GitRepositoryFormat,
    LocalGitRepository,
    RemoteGitRepository,
    )

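# An "inventory delta", as used throughout this module, is a list of
# (old_path, new_path, file_id, inventory_entry) tuples suitable for
# Repository.add_inventory_by_delta(): old_path is None for newly added
# paths, and new_path/inventory_entry are None for removals.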
def import_git_blob(texts, mapping, path, name, (base_hexsha, hexsha),
        base_bzr_tree, parent_id, revision_id,
        parent_bzr_trees, lookup_object, (base_mode, mode), store_updater,
        lookup_file_id):
    """Import a git blob object into a bzr repository.

    :param texts: VersionedFiles to add to
    :param path: Path in the tree
    :param blob: A git blob
    :return: Inventory delta for this file
    """
    if mapping.is_special_file(path):
        return []
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return []
    file_id = lookup_file_id(path)
    if stat.S_ISLNK(mode):
        cls = InventoryLink
    else:
        cls = InventoryFile
    ie = cls(file_id, name.decode("utf-8"), parent_id)
    if ie.kind == "file":
        ie.executable = mode_is_executable(mode)
    if base_hexsha == hexsha and mode_kind(base_mode) == mode_kind(mode):
        base_exec = base_bzr_tree.is_executable(path)
        if ie.kind == "symlink":
            ie.symlink_target = base_bzr_tree.get_symlink_target(path)
        else:
            ie.text_size = base_bzr_tree.get_file_size(path)
            ie.text_sha1 = base_bzr_tree.get_file_sha1(path)
        if ie.kind == "symlink" or ie.executable == base_exec:
            ie.revision = base_bzr_tree.get_file_revision(path)
        else:
            blob = lookup_object(hexsha)
    else:
        blob = lookup_object(hexsha)
        if ie.kind == "symlink":
            ie.revision = None
            ie.symlink_target = blob.data.decode("utf-8")
        else:
            ie.text_size = sum(imap(len, blob.chunked))
            ie.text_sha1 = osutils.sha_strings(blob.chunked)
    # Check what revision we should store
    parent_keys = []
    for ptree in parent_bzr_trees:
        try:
            ppath = ptree.id2path(file_id)
        except errors.NoSuchId:
            continue
        pkind = ptree.kind(ppath, file_id)
        if (pkind == ie.kind and
            ((pkind == "symlink" and ptree.get_symlink_target(ppath, file_id) == ie.symlink_target) or
             (pkind == "file" and ptree.get_file_sha1(ppath, file_id) == ie.text_sha1 and
              ptree.is_executable(ppath, file_id) == ie.executable))):
            # found a revision in one of the parents to use
            ie.revision = ptree.get_file_revision(ppath, file_id)
            break
        parent_key = (file_id, ptree.get_file_revision(ppath, file_id))
        if parent_key not in parent_keys:
            parent_keys.append(parent_key)
    if ie.revision is None:
        # Need to store a new revision
        ie.revision = revision_id
        if ie.revision is None:
            raise ValueError("no file revision set")
        if ie.kind == 'symlink':
            chunks = []
        else:
            chunks = blob.chunked
        texts.insert_record_stream([
            ChunkedContentFactory((file_id, ie.revision),
                tuple(parent_keys), ie.text_sha1, chunks)])
    invdelta = []
    if base_hexsha is not None:
        old_path = path.decode("utf-8")  # Renames are not supported yet
        if stat.S_ISDIR(base_mode):
            invdelta.extend(remove_disappeared_children(base_bzr_tree, old_path,
                lookup_object(base_hexsha), [], lookup_object))
    else:
        old_path = None
    new_path = path.decode("utf-8")
    invdelta.append((old_path, new_path, file_id, ie))
    if base_hexsha != hexsha:
        store_updater.add_object(blob, (ie.file_id, ie.revision), path)
    return invdelta

class SubmodulesRequireSubtrees(BzrError):

    _fmt = ("The repository you are fetching from contains submodules, "
            "which are not yet supported.")


def import_git_submodule(texts, mapping, path, name, (base_hexsha, hexsha),
        base_bzr_tree, parent_id, revision_id, parent_bzr_trees, lookup_object,
        (base_mode, mode), store_updater, lookup_file_id):
    """Import a git submodule."""
    if base_hexsha == hexsha and base_mode == mode:
        return [], {}
    file_id = lookup_file_id(path)
    invdelta = []
    ie = TreeReference(file_id, name.decode("utf-8"), parent_id)
    ie.revision = revision_id
    if base_hexsha is not None:
        old_path = path.decode("utf-8")  # Renames are not supported yet
        if stat.S_ISDIR(base_mode):
            invdelta.extend(remove_disappeared_children(base_bzr_tree, old_path,
                lookup_object(base_hexsha), [], lookup_object))
    else:
        old_path = None
    ie.reference_revision = mapping.revision_id_foreign_to_bzr(hexsha)
    texts.insert_record_stream([
        ChunkedContentFactory((file_id, ie.revision), (), None, [])])
    invdelta.append((old_path, path.decode("utf-8"), file_id, ie))
    return invdelta, {}

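# For illustration: if the base git tree contained entries "a" and "b/",
# and only "a" is passed in existing_children, remove_disappeared_children()
# below yields a removal entry for "b" and, recursively, for everything
# underneath it.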
def remove_disappeared_children(base_bzr_tree, path, base_tree, existing_children,
        lookup_object):
    """Generate an inventory delta for removed children.

    :param base_bzr_tree: Base bzr tree against which to generate the
        inventory delta.
    :param path: Path to process (unicode)
    :param base_tree: Git Tree base object
    :param existing_children: Children that still exist
    :param lookup_object: Lookup a git object by its SHA1
    :return: Inventory delta, as list
    """
    if type(path) is not unicode:
        raise TypeError(path)
    ret = []
    for name, mode, hexsha in base_tree.iteritems():
        if name in existing_children:
            continue
        c_path = posixpath.join(path, name.decode("utf-8"))
        file_id = base_bzr_tree.path2id(c_path)
        if file_id is None:
            raise TypeError(file_id)
        ret.append((c_path, None, file_id, None))
        if stat.S_ISDIR(mode):
            ret.extend(remove_disappeared_children(
                base_bzr_tree, c_path, lookup_object(hexsha), [], lookup_object))
    return ret

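# import_git_tree() below returns an (inventory delta, unusual modes) pair;
# the second element maps paths to git file modes that bzr cannot represent
# directly (anything other than a directory, regular file, executable file,
# symlink or submodule link), so the caller can record them separately.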
def import_git_tree(texts, mapping, path, name, (base_hexsha, hexsha),
        base_bzr_tree, parent_id, revision_id, parent_bzr_trees,
        lookup_object, (base_mode, mode), store_updater,
        lookup_file_id, allow_submodules=False):
    """Import a git tree object into a bzr repository.

    :param texts: VersionedFiles object to add to
    :param path: Path in the tree (str)
    :param name: Name of the tree (str)
    :param tree: A git tree object
    :param base_bzr_tree: Base inventory against which to return inventory delta
    :return: Inventory delta for this subtree
    """
    if type(path) is not str:
        raise TypeError(path)
    if type(name) is not str:
        raise TypeError(name)
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return [], {}
    invdelta = []
    file_id = lookup_file_id(path)
    # We just have to hope this is indeed utf-8:
    ie = InventoryDirectory(file_id, name.decode("utf-8"), parent_id)
    tree = lookup_object(hexsha)
    if base_hexsha is None:
        base_tree = None
        old_path = None  # Newly appeared here
    else:
        base_tree = lookup_object(base_hexsha)
        old_path = path.decode("utf-8")  # Renames aren't supported yet
    new_path = path.decode("utf-8")
    if base_tree is None or type(base_tree) is not Tree:
        ie.revision = revision_id
        invdelta.append((old_path, new_path, ie.file_id, ie))
        texts.insert_record_stream([
            ChunkedContentFactory((ie.file_id, ie.revision), (), None, [])])
    # Remember for next time
    existing_children = set()
    child_modes = {}
    for name, child_mode, child_hexsha in tree.iteritems():
        existing_children.add(name)
        child_path = posixpath.join(path, name)
        if type(base_tree) is Tree:
            try:
                child_base_mode, child_base_hexsha = base_tree[name]
            except KeyError:
                child_base_hexsha = None
                child_base_mode = 0
        else:
            child_base_hexsha = None
            child_base_mode = 0
        if stat.S_ISDIR(child_mode):
            subinvdelta, grandchildmodes = import_git_tree(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha),
                base_bzr_tree, file_id, revision_id, parent_bzr_trees,
                lookup_object, (child_base_mode, child_mode), store_updater,
                lookup_file_id, allow_submodules=allow_submodules)
        elif S_ISGITLINK(child_mode):  # submodule
            if not allow_submodules:
                raise SubmodulesRequireSubtrees()
            subinvdelta, grandchildmodes = import_git_submodule(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha),
                base_bzr_tree, file_id, revision_id, parent_bzr_trees,
                lookup_object, (child_base_mode, child_mode), store_updater,
                lookup_file_id)
        else:
            if not mapping.is_special_file(name):
                subinvdelta = import_git_blob(texts, mapping, child_path, name,
                    (child_base_hexsha, child_hexsha), base_bzr_tree, file_id,
                    revision_id, parent_bzr_trees, lookup_object,
                    (child_base_mode, child_mode), store_updater, lookup_file_id)
            else:
                subinvdelta = []
            grandchildmodes = {}
        child_modes.update(grandchildmodes)
        invdelta.extend(subinvdelta)
        if child_mode not in (stat.S_IFDIR, DEFAULT_FILE_MODE,
                              stat.S_IFLNK, DEFAULT_FILE_MODE | 0111,
                              S_IFGITLINK):
            child_modes[child_path] = child_mode
    # Remove any children that have disappeared
    if base_tree is not None and type(base_tree) is Tree:
        invdelta.extend(remove_disappeared_children(base_bzr_tree, old_path,
            base_tree, existing_children, lookup_object))
    store_updater.add_object(tree, (file_id, ), path)
    return invdelta, child_modes

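# The following check is only run when the "verify" debug flag is set (see
# import_git_commit below): it regenerates git objects from the imported
# revision and compares them against the originals.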
def verify_commit_reconstruction(target_git_object_retriever, lookup_object,
        o, rev, ret_tree, parent_trees, mapping, unusual_modes, verifiers):
    new_unusual_modes = mapping.export_unusual_file_modes(rev)
    if new_unusual_modes != unusual_modes:
        raise AssertionError("unusual modes don't match: %r != %r" % (
            unusual_modes, new_unusual_modes))
    # Verify that we can reconstruct the commit properly
    rec_o = target_git_object_retriever._reconstruct_commit(rev, o.tree, True,
        verifiers)
    if rec_o != o:
        raise AssertionError("Reconstructed commit differs: %r != %r" % (
            rec_o, o))
    diff = []
    new_objs = {}
    for path, obj, ie in _tree_to_objects(ret_tree, parent_trees,
            target_git_object_retriever._cache.idmap, unusual_modes,
            mapping.BZR_DUMMY_FILE):
        old_obj_id = tree_lookup_path(lookup_object, o.tree, path)[1]
        new_objs[path] = obj
        if obj.id != old_obj_id:
            diff.append((path, lookup_object(old_obj_id), obj))
    for (path, old_obj, new_obj) in diff:
        while (old_obj.type_name == "tree" and
               new_obj.type_name == "tree" and
               sorted(old_obj) == sorted(new_obj)):
            for name in old_obj:
                if old_obj[name][0] != new_obj[name][0]:
                    raise AssertionError("Modes for %s differ: %o != %o" %
                        (path, old_obj[name][0], new_obj[name][0]))
                if old_obj[name][1] != new_obj[name][1]:
                    # Found a differing child, delve deeper
                    path = posixpath.join(path, name)
                    old_obj = lookup_object(old_obj[name][1])
                    new_obj = new_objs[path]
                    break
        raise AssertionError("objects differ for %s: %r != %r" % (path,
            old_obj, new_obj))

def ensure_inventories_in_repo(repo, trees):
    real_inv_vf = repo.inventories.without_fallbacks()
    for t in trees:
        revid = t.get_revision_id()
        if not real_inv_vf.get_parent_map([(revid, )]):
            repo.add_inventory(revid, t.inventory, t.get_parent_ids())

def import_git_commit(repo, mapping, head, lookup_object,
                      target_git_object_retriever, trees_cache):
    o = lookup_object(head)
    # Note that this uses mapping.revision_id_foreign_to_bzr. If the parents
    # were bzr roundtripped revisions they would be specified in the
    # roundtrip data.
    rev, roundtrip_revid, verifiers = mapping.import_commit(
        o, mapping.revision_id_foreign_to_bzr)
    if roundtrip_revid is not None:
        original_revid = rev.revision_id
        rev.revision_id = roundtrip_revid
    # We have to do this here, since we have to walk the tree and
    # we need to make sure to import the blobs / trees with the right
    # path; this may involve adding them more than once.
    parent_trees = trees_cache.revision_trees(rev.parent_ids)
    ensure_inventories_in_repo(repo, parent_trees)
    if parent_trees == []:
        base_bzr_tree = trees_cache.revision_tree(NULL_REVISION)
        base_tree = None
        base_mode = None
    else:
        base_bzr_tree = parent_trees[0]
        base_tree = lookup_object(o.parents[0]).tree
        base_mode = stat.S_IFDIR
    store_updater = target_git_object_retriever._get_updater(rev)
    tree_supplement = mapping.get_fileid_map(lookup_object, o.tree)
    inv_delta, unusual_modes = import_git_tree(repo.texts,
        mapping, "", "", (base_tree, o.tree), base_bzr_tree,
        None, rev.revision_id, parent_trees,
        lookup_object, (base_mode, stat.S_IFDIR), store_updater,
        tree_supplement.lookup_file_id,
        allow_submodules=getattr(repo._format, "supports_tree_reference",
                                 False))
    if unusual_modes != {}:
        for path, mode in unusual_modes.iteritems():
            warn_unusual_mode(rev.foreign_revid, path, mode)
        mapping.import_unusual_file_modes(rev, unusual_modes)
    try:
        basis_id = rev.parent_ids[0]
    except IndexError:
        basis_id = NULL_REVISION
        base_bzr_inventory = None
    else:
        try:
            base_bzr_inventory = base_bzr_tree.root_inventory
        except AttributeError:  # bzr < 2.6
            base_bzr_inventory = base_bzr_tree.inventory
    rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
        inv_delta, rev.revision_id, rev.parent_ids,
        base_bzr_inventory)
    ret_tree = InventoryRevisionTree(repo, inv, rev.revision_id)
    # Check verifiers
    if verifiers and roundtrip_revid is not None:
        testament = StrictTestament3(rev, ret_tree)
        calculated_verifiers = {"testament3-sha1": testament.as_sha1()}
        if calculated_verifiers != verifiers:
            trace.mutter("Testament SHA1 %r for %r did not match %r.",
                         calculated_verifiers["testament3-sha1"],
                         rev.revision_id, verifiers["testament3-sha1"])
            rev.revision_id = original_revid
            rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
                inv_delta, rev.revision_id, rev.parent_ids, base_bzr_inventory)
            ret_tree = InventoryRevisionTree(repo, inv, rev.revision_id)
    else:
        calculated_verifiers = {}
    store_updater.add_object(o, calculated_verifiers, None)
    store_updater.finish()
    trees_cache.add(ret_tree)
    repo.add_revision(rev.revision_id, rev)
    if "verify" in debug.debug_flags:
        verify_commit_reconstruction(target_git_object_retriever,
            lookup_object, o, rev, ret_tree, parent_trees, mapping,
            unusual_modes, verifiers)

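# import_git_objects() walks from the requested heads to find new commits,
# then imports them in topological order in batches, keeping a bzr write
# group and a git object store write group open per batch so that a failure
# rolls back only the batch being imported.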
def import_git_objects(repo, mapping, object_iter,
        target_git_object_retriever, heads, pb=None, limit=None):
    """Import a set of git objects into a bzr repository.

    :param repo: Target Bazaar repository
    :param mapping: Mapping to use
    :param object_iter: Iterator over Git objects.
    :return: Tuple with pack hints and last imported revision id
    """
    def lookup_object(sha):
        try:
            return object_iter[sha]
        except KeyError:
            return target_git_object_retriever[sha]
    graph = []
    checked = set()
    heads = list(set(heads))
    trees_cache = LRUTreeCache(repo)
    # Find and convert commit objects
    while heads:
        if pb is not None:
            pb.update("finding revisions to fetch", len(graph), None)
        head = heads.pop()
        if head == ZERO_SHA:
            continue
        if type(head) is not str:
            raise TypeError(head)
        try:
            o = lookup_object(head)
        except KeyError:
            continue
        if isinstance(o, Commit):
            rev, roundtrip_revid, verifiers = mapping.import_commit(o,
                mapping.revision_id_foreign_to_bzr)
            if (repo.has_revision(rev.revision_id) or
                    (roundtrip_revid and repo.has_revision(roundtrip_revid))):
                continue
            graph.append((o.id, o.parents))
            heads.extend([p for p in o.parents if p not in checked])
        elif isinstance(o, Tag):
            if o.object[1] not in checked:
                heads.append(o.object[1])
        else:
            trace.warning("Unable to import head object %r" % o)
        checked.add(o.id)

    # Order the revisions
    # Create the inventory objects
    batch_size = 1000
    revision_ids = topo_sort(graph)
    pack_hints = []
    if limit is not None:
        revision_ids = revision_ids[:limit]
    last_imported = None
    for offset in range(0, len(revision_ids), batch_size):
        target_git_object_retriever.start_write_group()
        try:
            repo.start_write_group()
            try:
                for i, head in enumerate(
                        revision_ids[offset:offset + batch_size]):
                    if pb is not None:
                        pb.update("fetching revisions", offset + i,
                                  len(revision_ids))
                    import_git_commit(repo, mapping, head, lookup_object,
                                      target_git_object_retriever, trees_cache)
                    last_imported = head
            except BaseException:
                repo.abort_write_group()
                raise
            else:
                hint = repo.commit_write_group()
                if hint is not None:
                    pack_hints.extend(hint)
        except BaseException:
            target_git_object_retriever.abort_write_group()
            raise
        else:
            target_git_object_retriever.commit_write_group()
    return pack_hints, last_imported

class InterFromGitRepository(InterRepository):

    _matching_repo_format = GitRepositoryFormat()

    def _target_has_shas(self, shas):
        raise NotImplementedError(self._target_has_shas)

    def get_determine_wants_heads(self, wants, include_tags=False):
        wants = set(wants)
        def determine_wants(refs):
            potential = set(wants)
            if include_tags:
                for k, unpeeled in refs.iteritems():
                    if k.endswith("^{}"):
                        continue
                    if not is_tag(k):
                        continue
                    if unpeeled == ZERO_SHA:
                        continue
                    potential.add(unpeeled)
            return list(potential - self._target_has_shas(potential))
        return determine_wants

    def determine_wants_all(self, refs):
        raise NotImplementedError(self.determine_wants_all)

    @staticmethod
    def _get_repo_format_to_test():
        return None

    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content."""
        self.fetch(revision_id, find_ghosts=False)

    def search_missing_revision_ids(self,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        if limit is not None:
            raise errors.FetchLimitUnsupported(self)
        git_shas = []
        todo = []
        if revision_ids:
            todo.extend(revision_ids)
        if if_present_ids:
            todo.extend(if_present_ids)
        for revid in todo:
            if revid == NULL_REVISION:
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            git_shas.append(git_sha)
        walker = Walker(self.source._git.object_store,
            include=git_shas, exclude=[
                sha for sha in self.target.controldir.get_refs_container().as_dict().values()
                if sha != ZERO_SHA])
        missing_revids = set()
        for entry in walker:
            missing_revids.add(self.source.lookup_foreign_revision_id(entry.commit.id))
        return self.source.revision_ids_to_search_result(missing_revids)

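# The determine_wants callbacks built in these classes receive the remote
# refs dict (ref name -> sha) and return the list of commit shas worth
# fetching; shas the target already has are filtered out via
# _target_has_shas().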
class InterGitNonGitRepository(InterFromGitRepository):
    """Base InterRepository that copies revisions from a Git into a non-Git
    repository."""

    def _target_has_shas(self, shas):
        revids = {}
        for sha in shas:
            try:
                revid = self.source.lookup_foreign_revision_id(sha)
            except NotCommitError:
                # Commit is definitely not present
                continue
            else:
                revids[revid] = sha
        return set([revids[r] for r in self.target.has_revisions(revids)])

    def determine_wants_all(self, refs):
        potential = set()
        for k, v in refs.iteritems():
            # For non-git target repositories, only worry about peeled
            if v == ZERO_SHA:
                continue
            potential.add(self.source.controldir.get_peeled(k) or v)
        return list(potential - self._target_has_shas(potential))

    def get_determine_wants_heads(self, wants, include_tags=False):
        wants = set(wants)
        def determine_wants(refs):
            potential = set(wants)
            if include_tags:
                for k, unpeeled in refs.iteritems():
                    if not is_tag(k):
                        continue
                    if unpeeled == ZERO_SHA:
                        continue
                    potential.add(self.source.controldir.get_peeled(k) or unpeeled)
            return list(potential - self._target_has_shas(potential))
        return determine_wants

    def _warn_slow(self):
        trace.warning(
            'Fetching from Git to Bazaar repository. '
            'For better performance, fetch into a Git repository.')

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """Fetch objects from a remote server.

        :param determine_wants: determine_wants callback
        :param mapping: BzrGitMapping to use
        :param limit: Maximum number of commits to import.
        :return: Tuple with pack hint, last imported revision id and remote refs
        """
        raise NotImplementedError(self.fetch_objects)

    def get_determine_wants_revids(self, revids, include_tags=False):
        wants = set()
        for revid in set(revids):
            if self.target.has_revision(revid):
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            wants.add(git_sha)
        return self.get_determine_wants_heads(wants, include_tags=include_tags)
    def fetch(self, revision_id=None, find_ghosts=False,
              mapping=None, fetch_spec=None, include_tags=False):
        if mapping is None:
            mapping = self.source.get_mapping()
        if revision_id is not None:
            interesting_heads = [revision_id]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                interesting_heads = recipe[1]
            else:
                raise AssertionError("Unsupported search result type %s" %
                                     recipe[0])
        else:
            interesting_heads = None

        if interesting_heads is not None:
            determine_wants = self.get_determine_wants_revids(
                interesting_heads, include_tags=include_tags)
        else:
            determine_wants = self.determine_wants_all

        (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants,
            mapping)
        if pack_hint is not None and self.target._format.pack_compresses:
            self.target.pack(hint=pack_hint)
        return remote_refs

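# Matches git sideband progress lines such as
# "Counting objects:  10% (1/10)" so they can be forwarded to a bzr
# progress bar.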
_GIT_PROGRESS_RE = re.compile(r"(.*?): +(\d+)% \((\d+)/(\d+)\)")


def report_git_progress(pb, text):
    text = text.rstrip("\r\n")
    trace.mutter('git: %s', text)
    g = _GIT_PROGRESS_RE.match(text)
    if g is not None:
        (text, pct, current, total) = g.groups()
        pb.update(text, int(current), int(total))
    else:
        pb.update(text, 0, 0)

class DetermineWantsRecorder(object):

    def __init__(self, actual):
        self.actual = actual
        self.wants = []
        self.remote_refs = {}

    def __call__(self, refs):
        if type(refs) is not dict:
            raise TypeError(refs)
        self.remote_refs = refs
        self.wants = self.actual(refs)
        return self.wants

class InterRemoteGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a remote Git into a non-Git
    repository."""

    def get_target_heads(self):
        # FIXME: This should be more efficient
        all_revs = self.target.all_revision_ids()
        parent_map = self.target.get_parent_map(all_revs)
        all_parents = set()
        map(all_parents.update, parent_map.itervalues())
        return set(all_revs) - all_parents

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """See `InterGitNonGitRepository`."""
        self._warn_slow()
        store = BazaarObjectStore(self.target, mapping)
        with store.lock_write():
            heads = self.get_target_heads()
            graph_walker = ObjectStoreGraphWalker(
                [store._lookup_revision_sha1(head) for head in heads],
                lambda sha: store[sha].parents)
            wants_recorder = DetermineWantsRecorder(determine_wants)

            pb = ui.ui_factory.nested_progress_bar()
            try:
                objects_iter = self.source.fetch_objects(
                    wants_recorder, graph_walker, store.get_raw,
                    progress=lambda text: report_git_progress(pb, text),)
                trace.mutter("Importing %d new revisions",
                             len(wants_recorder.wants))
                (pack_hint, last_rev) = import_git_objects(self.target,
                    mapping, objects_iter, store, wants_recorder.wants, pb,
                    limit)
                return (pack_hint, last_rev, wants_recorder.remote_refs)
            finally:
                pb.finished()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, RemoteGitRepository):
            return False
        if not target.supports_rich_root():
            return False
        if isinstance(target, GitRepository):
            return False
        if not getattr(target._format, "supports_full_versioned_files", True):
            return False
        return True

class InterLocalGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a local Git into a non-Git
    repository."""

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """See `InterGitNonGitRepository`."""
        self._warn_slow()
        remote_refs = self.source.controldir.get_refs_container().as_dict()
        wants = determine_wants(remote_refs)

        pb = ui.ui_factory.nested_progress_bar()
        target_git_object_retriever = BazaarObjectStore(self.target, mapping)
        try:
            target_git_object_retriever.lock_write()
            try:
                (pack_hint, last_rev) = import_git_objects(self.target,
                    mapping, self.source._git.object_store,
                    target_git_object_retriever, wants, pb, limit)
                return (pack_hint, last_rev, remote_refs)
            finally:
                target_git_object_retriever.unlock()
        finally:
            pb.finished()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, LocalGitRepository):
            return False
        if not target.supports_rich_root():
            return False
        if isinstance(target, GitRepository):
            return False
        if not getattr(target._format, "supports_full_versioned_files", True):
            return False
        return True

class InterGitGitRepository(InterFromGitRepository):
    """InterRepository that copies between Git repositories."""

    def fetch_refs(self, update_refs, lossy=False):
        if lossy:
            raise errors.LossyPushToSameVCS(self.source, self.target)
        old_refs = self.target.controldir.get_refs_container()
        ref_changes = {}
        def determine_wants(heads):
            old_refs = dict([(k, (v, None)) for (k, v) in heads.as_dict().iteritems()])
            new_refs = update_refs(old_refs)
            ref_changes.update(new_refs)
            return [sha1 for (sha1, bzr_revid) in new_refs.itervalues()]
        self.fetch_objects(determine_wants, lossy=lossy)
        for k, (git_sha, bzr_revid) in ref_changes.iteritems():
            self.target._git.refs[k] = git_sha
        new_refs = self.target.controldir.get_refs_container()
        return None, old_refs, new_refs

    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
        if lossy:
            raise errors.LossyPushToSameVCS(self.source, self.target)
        if limit is not None:
            raise errors.FetchLimitUnsupported(self)
        graphwalker = self.target._git.get_graph_walker()
        if (isinstance(self.source, LocalGitRepository) and
                isinstance(self.target, LocalGitRepository)):
            pb = ui.ui_factory.nested_progress_bar()
            try:
                refs = self.source._git.fetch(self.target._git, determine_wants,
                    lambda text: report_git_progress(pb, text))
            finally:
                pb.finished()
            return (None, None, refs)
        elif (isinstance(self.source, LocalGitRepository) and
              isinstance(self.target, RemoteGitRepository)):
            raise NotImplementedError
        elif (isinstance(self.source, RemoteGitRepository) and
              isinstance(self.target, LocalGitRepository)):
            pb = ui.ui_factory.nested_progress_bar()
            try:
                f, commit, abort = self.target._git.object_store.add_pack()
                try:
                    refs = self.source.controldir.fetch_pack(
                        determine_wants, graphwalker, f.write,
                        lambda text: report_git_progress(pb, text))
                    commit()
                    return (None, None, refs)
                except BaseException:
                    abort()
                    raise
            finally:
                pb.finished()
        else:
            raise AssertionError("fetching between %r and %r not supported" %
                                 (self.source, self.target))

    def _target_has_shas(self, shas):
        return set([sha for sha in shas if sha in self.target._git.object_store])

    def fetch(self, revision_id=None, find_ghosts=False,
              mapping=None, fetch_spec=None, branches=None, limit=None, include_tags=False):
        if mapping is None:
            mapping = self.source.get_mapping()
        if revision_id is not None:
            args = [revision_id]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                heads = recipe[1]
            else:
                raise AssertionError(
                    "Unsupported search result type %s" % recipe[0])
            args = heads
        if branches is not None:
            def determine_wants(refs):
                ret = []
                for name, value in refs.iteritems():
                    if value == ZERO_SHA:
                        continue
                    if name in branches or (include_tags and is_tag(name)):
                        ret.append(value)
                return ret
        elif fetch_spec is None and revision_id is None:
            determine_wants = self.determine_wants_all
        else:
            determine_wants = self.get_determine_wants_revids(args, include_tags=include_tags)
        wants_recorder = DetermineWantsRecorder(determine_wants)
        self.fetch_objects(wants_recorder, mapping, limit=limit)
        return wants_recorder.remote_refs

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        return (isinstance(source, GitRepository) and
                isinstance(target, GitRepository))

    def get_determine_wants_revids(self, revids, include_tags=False):
        wants = set()
        for revid in set(revids):
            if self.target.has_revision(revid):
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            wants.add(git_sha)
        return self.get_determine_wants_heads(wants, include_tags=include_tags)

    def determine_wants_all(self, refs):
        potential = set([v for v in refs.values() if v != ZERO_SHA])
        return list(potential - self._target_has_shas(potential))
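

# A minimal usage sketch (illustrative; assumes this module is loaded as
# part of the breezy git plugin, so the InterRepository optimisers above
# are registered):
#
#   from breezy.repository import InterRepository
#   inter = InterRepository.get(git_repo, bzr_repo)
#   inter.fetch(mapping=git_repo.get_mapping())
#
# InterRepository.get() picks one of the InterFromGitRepository subclasses
# above via their is_compatible() staticmethods.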