@@ -125 +125 @@
         raise NotImplementedError(self.fetch_refs)

     def search_missing_revision_ids(self,
-            find_ghosts=True, revision_ids=None, if_present_ids=None,
-            limit=None):
+                                    find_ghosts=True, revision_ids=None,
+                                    if_present_ids=None, limit=None):
         if limit is not None:
             raise FetchLimitUnsupported(self)
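This hunk (and several below) replaces a hanging indent with continuation lines aligned to the opening parenthesis. Both layouts are PEP 8-compliant; a minimal illustration of the two styles, using a stub body:

    # Hanging indent (the old layout):
    def search_missing_revision_ids(
            self, find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        ...

    # Aligned with the opening delimiter (the layout this diff adopts):
    def search_missing_revision_ids(self,
                                    find_ghosts=True, revision_ids=None,
                                    if_present_ids=None, limit=None):
        ...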
@@ -145 +145 @@
                     raise NoSuchRevision(revid, self.source)
                 git_shas.append(git_sha)
-            walker = Walker(self.source_store,
-                include=git_shas, exclude=[
+            walker = Walker(
+                self.source_store,
+                include=git_shas,
+                exclude=[
                     sha for sha in self.target.controldir.get_refs_container().as_dict().values()
                     if sha != ZERO_SHA])
             missing_revids = set()
             for entry in walker:
-                for (kind, type_data) in self.source_store.lookup_git_sha(entry.commit.id):
+                for (kind, type_data) in self.source_store.lookup_git_sha(
+                        entry.commit.id):
                     if kind == "commit":
                         missing_revids.add(type_data[0])
             return self.source.revision_ids_to_search_result(missing_revids)
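For context, the method above asks dulwich to walk the commits reachable from the requested revisions (`include`) but not from any ref already present in the target (`exclude`). A minimal sketch of that include/exclude walk, assuming a dulwich object store in `store`:

    from dulwich.walk import Walker

    def missing_commit_shas(store, wanted_shas, existing_shas):
        # Yields the commits reachable from wanted_shas but not from
        # existing_shas, mirroring the walk set up in the hunk above.
        walker = Walker(store, include=wanted_shas, exclude=existing_shas)
        return [entry.commit.id for entry in walker]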
@@ -219 +222 @@
             new_stop_revids = []
             for revid in stop_revids:
                 sha1 = revid_sha_map.get(revid)
-                if (not revid in missing and
-                    self._revision_needs_fetching(sha1, revid)):
+                if (revid not in missing and
+                        self._revision_needs_fetching(sha1, revid)):
                     missing.add(revid)
                     new_stop_revids.append(revid)
             stop_revids = set()
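The only wording change in this hunk is from `not revid in missing` to `revid not in missing`; the two are equivalent, `not in` being the idiomatic spelling:

    # Equivalent membership tests; the diff adopts the second form.
    missing = {b'rev-a'}
    assert (not b'rev-b' in missing) == (b'rev-b' not in missing)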
@@ -250 +252 @@
             if not v.startswith(SYMREF):
                 try:
-                    for (kind, type_data) in self.source_store.lookup_git_sha(v):
-                        if kind == "commit" and self.source.has_revision(type_data[0]):
+                    for (kind, type_data) in self.source_store.lookup_git_sha(
+                            v):
+                        if kind == "commit" and self.source.has_revision(
+                                type_data[0]):
                             revid = type_data[0]
@@ -264 +268 @@
             old_refs = self._get_target_bzr_refs()
             new_refs = update_refs(old_refs)
             revidmap = self.fetch_objects(
-                [(git_sha, bzr_revid) for (git_sha, bzr_revid) in new_refs.values() if git_sha is None or not git_sha.startswith(SYMREF)], lossy=lossy)
+                [(git_sha, bzr_revid)
+                 for (git_sha, bzr_revid) in new_refs.values()
+                 if git_sha is None or not git_sha.startswith(SYMREF)],
+                lossy=lossy)
             for name, (gitid, revid) in viewitems(new_refs):
                 if gitid is None:
@@ -273 +280 @@
                     gitid = self.source_store._lookup_revision_sha1(revid)
                 if gitid.startswith(SYMREF):
-                    self.target_refs.set_symbolic_ref(name, gitid[len(SYMREF):])
+                    self.target_refs.set_symbolic_ref(
+                        name, gitid[len(SYMREF):])
                 else:
                     try:
                         old_git_id = old_refs[name][0]
@@ -285 +293 @@
     def fetch_objects(self, revs, lossy, limit=None):
         if not lossy and not self.mapping.roundtripping:
             for git_sha, bzr_revid in revs:
-                if bzr_revid is not None and needs_roundtripping(self.source, bzr_revid):
+                if (bzr_revid is not None and
+                        needs_roundtripping(self.source, bzr_revid)):
                     raise NoPushSupport(self.source, self.target, self.mapping,
                                         bzr_revid)
         with self.source_store.lock_read():
@@ -296 +305 @@
             object_generator = MissingObjectsIterator(
                 self.source_store, self.source, pb)
             for (old_revid, git_sha) in object_generator.import_revisions(
                     revs, lossy=lossy):
                 if lossy:
-                    new_revid = self.mapping.revision_id_foreign_to_bzr(git_sha)
+                    new_revid = self.mapping.revision_id_foreign_to_bzr(
+                        git_sha)
                 else:
                     new_revid = old_revid
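In the lossy branch above, each imported git commit is given a bzr revision id derived from its sha via the mapping. A hypothetical sketch of the scheme, assuming the default bzr-git mapping whose revision ids carry a `git-v1:` prefix (the prefix is an assumption here, not shown in the diff):

    def revision_id_foreign_to_bzr(git_sha):
        # Sketch: embed the hex git sha in a prefixed bzr revision id,
        # as the default bzr-git mapping is understood to do.
        return b'git-v1:' + git_sha

    assert revision_id_foreign_to_bzr(b'a' * 40).startswith(b'git-v1:')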
@@ -315 +325 @@
     def fetch(self, revision_id=None, pb=None, find_ghosts=False,
-            fetch_spec=None, mapped_refs=None):
+              fetch_spec=None, mapped_refs=None):
         if mapped_refs is not None:
             stop_revisions = mapped_refs
         elif revision_id is not None:
             stop_revisions = [(None, revision_id)]
         elif fetch_spec is not None:
             recipe = fetch_spec.get_recipe()
             if recipe[0] in ("search", "proxy-search"):
                 stop_revisions = [(None, revid) for revid in recipe[1]]
             else:
-                raise AssertionError("Unsupported search result type %s" % recipe[0])
+                raise AssertionError(
+                    "Unsupported search result type %s" % recipe[0])
         else:
-            stop_revisions = [(None, revid) for revid in self.source.all_revision_ids()]
+            stop_revisions = [(None, revid)
+                              for revid in self.source.all_revision_ids()]
         self._warn_slow()
         try:
             self.fetch_objects(stop_revisions, lossy=False)
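fetch() above normalizes every entry point (mapped refs, a single revision, a search result, or the whole repository) into a list of (git_sha, bzr_revid) pairs before delegating to fetch_objects(); git_sha is left as None when only the bzr revision id is known. A small illustration of that shape, with a hypothetical helper name:

    def to_stop_revisions(revids):
        # Pair each bzr revision id with a git sha of None (unknown).
        return [(None, revid) for revid in revids]

    assert to_stop_revisions([b'rev-1']) == [(None, b'rev-1')]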
@@ -347 +359 @@
             raise NoPushSupport(self.source, self.target, self.mapping)
         unpeel_map = UnpeelMap.from_repository(self.source)
@@ -350 +363 @@
         def git_update_refs(old_refs):
             ret = {}
-            self.old_refs = dict([(k, (v, None)) for (k, v) in viewitems(old_refs)])
+            self.old_refs = dict([(k, (v, None))
+                                  for (k, v) in viewitems(old_refs)])
             self.new_refs = update_refs(self.old_refs)
             for name, (gitid, revid) in viewitems(self.new_refs):
                 if gitid is None:
                     git_sha = self.source_store._lookup_revision_sha1(revid)
-                    gitid = unpeel_map.re_unpeel_tag(git_sha, old_refs.get(name))
+                    gitid = unpeel_map.re_unpeel_tag(
+                        git_sha, old_refs.get(name))
                 if not overwrite:
-                    if remote_divergence(old_refs.get(name), gitid, self.source_store):
+                    if remote_divergence(
+                            old_refs.get(name), gitid, self.source_store):
                         raise DivergedBranches(self.source, self.target)
                 ret[name] = gitid
             return ret
         self._warn_slow()
         with self.source_store.lock_read():
-            new_refs = self.target.send_pack(git_update_refs,
-                self.source_store.generate_lossy_pack_data)
+            new_refs = self.target.send_pack(
+                git_update_refs, self.source_store.generate_lossy_pack_data)
         # FIXME: revidmap?
         return revidmap, self.old_refs, self.new_refs
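send_pack() drives the push through a callback, in the style of dulwich's GitClient.send_pack: the callable receives the remote's current refs and returns the complete dict of refs the client wants the remote to end up with. A minimal sketch of that contract (the ref name and sha below are placeholders, not values from the diff):

    def git_update_refs(old_refs):
        # Receive the remote's refs; return the refs we want it to have.
        new_refs = dict(old_refs)
        new_refs[b'refs/heads/master'] = b'0' * 40  # placeholder sha
        return new_refs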
@@ -398 +414 @@
     def get_determine_wants_heads(self, wants, include_tags=False):
         wants = set(wants)
+
         def determine_wants(refs):
             unpeel_lookup = {}
             for k, v in viewitems(refs):
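`viewitems` here comes from a Python 2/3 compatibility shim; on Python 3 it is simply the dict's items view. A minimal stand-in, assuming Python 3 only:

    def viewitems(d):
        return d.items()

    assert list(viewitems({'a': 1})) == [('a', 1)]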
@@ -427 +444 @@
         self.fetch(revision_id, find_ghosts=False)

     def search_missing_revision_ids(self,
-            find_ghosts=True, revision_ids=None, if_present_ids=None,
-            limit=None):
+                                    find_ghosts=True, revision_ids=None,
+                                    if_present_ids=None, limit=None):
         if limit is not None:
             raise FetchLimitUnsupported(self)
         if revision_ids is None and if_present_ids is None:
@@ -488 +505 @@
         :param determine_wants: determine_wants callback
         :param mapping: BzrGitMapping to use
         :param limit: Maximum number of commits to import.
-        :return: Tuple with pack hint, last imported revision id and remote refs
+        :return: Tuple with pack hint, last imported revision id and remote
+            refs
         """
         raise NotImplementedError(self.fetch_objects)
@@ -524 +542 @@
             determine_wants = self.determine_wants_all
@@ -526 +544 @@
             (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants,
@@ -528 +546 @@
         if pack_hint is not None and self.target._format.pack_compresses:
             self.target.pack(hint=pack_hint)
         return remote_refs
@@ -560 +578 @@
                 wants_recorder, graph_walker, store.get_raw)
             trace.mutter("Importing %d new revisions",
                          len(wants_recorder.wants))
-            (pack_hint, last_rev) = import_git_objects(self.target,
-                mapping, objects_iter, store, wants_recorder.wants, pb,
-                limit)
+            (pack_hint, last_rev) = import_git_objects(
+                self.target, mapping, objects_iter, store,
+                wants_recorder.wants, pb, limit)
             return (pack_hint, last_rev, wants_recorder.remote_refs)
@@ -590 +608 @@
         self._warn_slow()
         remote_refs = self.source.controldir.get_refs_container().as_dict()
         wants = determine_wants(remote_refs)
@@ -594 +611 @@
         pb = ui.ui_factory.nested_progress_bar()
         target_git_object_retriever = get_object_store(self.target, mapping)
         try:
             target_git_object_retriever.lock_write()
             try:
-                (pack_hint, last_rev) = import_git_objects(self.target,
-                    mapping, self.source._git.object_store,
+                (pack_hint, last_rev) = import_git_objects(
+                    self.target, mapping, self.source._git.object_store,
                     target_git_object_retriever, wants, pb, limit)
                 return (pack_hint, last_rev, remote_refs)
@@ -627 +644 @@
             raise LossyPushToSameVCS(self.source, self.target)
         old_refs = self.target.controldir.get_refs_container()
         ref_changes = {}
+
         def determine_wants(heads):
-            old_refs = dict([(k, (v, None)) for (k, v) in viewitems(heads.as_dict())])
+            old_refs = dict([(k, (v, None))
+                             for (k, v) in viewitems(heads.as_dict())])
             new_refs = update_refs(old_refs)
             ref_changes.update(new_refs)
             return [sha1 for (sha1, bzr_revid) in viewvalues(new_refs)]
@@ -638 +657 @@
         new_refs = self.target.controldir.get_refs_container()
         return None, old_refs, new_refs
@@ -641 +660 @@
-    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
+    def fetch_objects(self, determine_wants, mapping=None, limit=None,
+                      lossy=False):
         raise NotImplementedError(self.fetch_objects)

     def _target_has_shas(self, shas):
-        return set([sha for sha in shas if sha in self.target._git.object_store])
+        return set(
+            [sha for sha in shas if sha in self.target._git.object_store])
@@ -647 +668 @@
     def fetch(self, revision_id=None, find_ghosts=False,
-              mapping=None, fetch_spec=None, branches=None, limit=None, include_tags=False):
+              mapping=None, fetch_spec=None, branches=None, limit=None,
+              include_tags=False):
         if mapping is None:
             mapping = self.source.get_mapping()
         if revision_id is not None:
@@ -671 +693 @@
         elif fetch_spec is None and revision_id is None:
             determine_wants = self.determine_wants_all
         else:
-            determine_wants = self.get_determine_wants_revids(args, include_tags=include_tags)
+            determine_wants = self.get_determine_wants_revids(
+                args, include_tags=include_tags)
         wants_recorder = DetermineWantsRecorder(determine_wants)
         self.fetch_objects(wants_recorder, mapping, limit=limit)
         return wants_recorder.remote_refs
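DetermineWantsRecorder wraps the determine_wants callback so the refs the remote advertised, and the wants chosen, remain inspectable after the fetch; fetch() above relies on that to return wants_recorder.remote_refs. A sketch of the idea (the real class is defined elsewhere in the module; this is an assumed reconstruction):

    class DetermineWantsRecorder(object):
        """Record the refs seen and the wants returned by a callback."""

        def __init__(self, actual):
            self.actual = actual
            self.remote_refs = {}
            self.wants = []

        def __call__(self, refs):
            self.remote_refs = refs
            self.wants = self.actual(refs)
            return self.wants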
@@ -695 +718 @@
 class InterLocalGitLocalGitRepository(InterGitGitRepository):

-    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
+    def fetch_objects(self, determine_wants, mapping=None, limit=None,
+                      lossy=False):
         if lossy:
             raise LossyPushToSameVCS(self.source, self.target)
         if limit is not None:
             raise FetchLimitUnsupported(self)
@@ -704 +728 @@
         progress = DefaultProgressReporter(pb).progress
         try:
             refs = self.source._git.fetch(
                 self.target._git, determine_wants,
                 progress=progress)
         finally:
             pb.finished()
         return (None, None, refs)
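For the local-to-local case the heavy lifting is dulwich's Repo.fetch, which copies the missing objects into the target repository and returns the source's refs. Roughly, assuming two local dulwich repositories on disk:

    from dulwich.repo import Repo

    def local_git_fetch(source_path, target_path):
        # Copy missing objects from source into target; the return value
        # is the source's refs, as used for remote_refs above.
        source = Repo(source_path)
        target = Repo(target_path)
        return source.fetch(target)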
@@ -720 +744 @@
 class InterRemoteGitLocalGitRepository(InterGitGitRepository):

-    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
+    def fetch_objects(self, determine_wants, mapping=None, limit=None,
+                      lossy=False):
         if lossy:
             raise LossyPushToSameVCS(self.source, self.target)
         if limit is not None:
             raise FetchLimitUnsupported(self)
         graphwalker = self.target._git.get_graph_walker()
-        if CAPABILITY_THIN_PACK in self.source.controldir._client._fetch_capabilities:
+        if (CAPABILITY_THIN_PACK in
+                self.source.controldir._client._fetch_capabilities):
             # TODO(jelmer): Avoid reading entire file into memory and
             # only processing it after the whole file has been fetched.
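CAPABILITY_THIN_PACK signals that the server may send a thin pack, one whose deltas reference objects the receiver already has; such a pack must be completed locally before it is stored, which is why the remote-to-local path special-cases it above. A sketch of the capability check, assuming dulwich's protocol constants:

    from dulwich.protocol import CAPABILITY_THIN_PACK

    def server_supports_thin_pack(fetch_capabilities):
        # fetch_capabilities is the capability set advertised by the
        # client, e.g. controldir._client._fetch_capabilities above.
        return CAPABILITY_THIN_PACK in fetch_capabilities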