  cls = InventoryFile
  ie = cls(file_id, name.decode("utf-8"), parent_id)
- ie.executable = mode_is_executable(mode)
+ if ie.kind == "file":
+     ie.executable = mode_is_executable(mode)
  if base_hexsha == hexsha and mode_kind(base_mode) == mode_kind(mode):
  base_ie = base_inv[base_inv.path2id(path)]
  ie.text_size = base_ie.text_size
  ie.text_sha1 = base_ie.text_sha1
- ie.symlink_target = base_ie.symlink_target
+ if ie.kind == "symlink":
+     ie.symlink_target = base_ie.symlink_target
  if ie.executable == base_ie.executable:
  ie.revision = base_ie.revision
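
The executable flag is now copied only onto file entries and the symlink target only onto symlink entries, since the other inventory entry classes do not carry those attributes. As a rough sketch of how the mode helpers used above are assumed to behave (approximate; the real helpers are presumably defined alongside the mapping code and also cover submodule entries):

    import stat

    def mode_kind(mode):
        # Assumed mapping from a git tree-entry mode to an inventory kind.
        if stat.S_ISLNK(mode):
            return "symlink"
        elif stat.S_ISDIR(mode):
            return "directory"
        return "file"

    def mode_is_executable(mode):
        # A blob counts as executable if any execute bit is set.
        return bool(mode & 0111)
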
  tuple(parent_keys), ie.text_sha1, chunks)])
  if base_hexsha is not None:
- old_path = path # Renames are not supported yet
+ old_path = path.decode("utf-8") # Renames are not supported yet
  if stat.S_ISDIR(base_mode):
  invdelta.extend(remove_disappeared_children(base_inv, old_path,
  lookup_object(base_hexsha), [], lookup_object))
- invdelta.append((old_path, path, file_id, ie))
+ new_path = path.decode("utf-8")
+ invdelta.append((old_path, new_path, file_id, ie))
  if base_hexsha != hexsha:
- store_updater.add_object(blob, ie)
+ store_updater.add_object(blob, ie, path)
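
On the Bazaar side the delta now consistently carries unicode paths, while the raw byte-string git path is what accompanies the blob into the object store updater. A minimal sketch of the delta-entry shape assumed here (file ids and paths are hypothetical):

    from bzrlib.inventory import InventoryFile

    ie = InventoryFile("readme-fileid", u"readme", "root-fileid")
    ie.revision = "example-revid"
    # Each delta entry is (old_path, new_path, file_id, new_entry); None marks
    # a side on which the file does not exist, and both paths are unicode.
    delta_entry = (None, u"docs/readme", "readme-fileid", ie)
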
  :param base_inv: Base inventory against which to generate the
- :param path: Path to process
+ :param path: Path to process (unicode)
  :param base_tree: Git Tree base object
  :param existing_children: Children that still exist
  :param lookup_object: Lookup a git object by its SHA1
  :return: Inventory delta, as list
+ assert type(path) is unicode
  for name, mode, hexsha in base_tree.iteritems():
  if name in existing_children:

  """Import a git tree object into a bzr repository.
  :param texts: VersionedFiles object to add to
- :param path: Path in the tree
+ :param path: Path in the tree (str)
+ :param name: Name of the tree (str)
  :param tree: A git tree object
  :param base_inv: Base inventory against which to return inventory delta
  :return: Inventory delta for this subtree
+ assert type(path) is str
+ assert type(name) is str
  if base_hexsha == hexsha and base_mode == mode:
  # If nothing has changed since the base revision, we're done
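
The new asserts pin down the path-type convention: git-side tree paths and entry names stay byte strings, and are only decoded to unicode at the point where they enter an inventory delta. For example:

    git_path = "src/m\xc3\xbcller.txt"     # str, as stored in the git tree
    bzr_path = git_path.decode("utf-8")    # unicode, as used in the delta
    assert type(git_path) is str
    assert type(bzr_path) is unicode
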
  old_path = None # Newly appeared here
  base_tree = lookup_object(base_hexsha)
- old_path = path # Renames aren't supported yet
+ old_path = path.decode("utf-8") # Renames aren't supported yet
+ new_path = path.decode("utf-8")
  if base_tree is None or type(base_tree) is not Tree:
  ie.revision = revision_id
- invdelta.append((old_path, path, ie.file_id, ie))
+ invdelta.append((old_path, new_path, ie.file_id, ie))
  texts.insert_record_stream([
  ChunkedContentFactory((ie.file_id, ie.revision), (), None, [])])
  # Remember for next time

  child_modes[child_path] = child_mode
  # Remove any children that have disappeared
  if base_tree is not None and type(base_tree) is Tree:
- invdelta.extend(remove_disappeared_children(base_inv, old_path,
+ invdelta.extend(remove_disappeared_children(base_inv, old_path,
  base_tree, existing_children, lookup_object))
- store_updater.add_object(tree, ie)
+ store_updater.add_object(tree, ie, path)
  return invdelta, child_modes
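
import_git_tree() hands back both the accumulated inventory delta and a map of child modes that Bazaar cannot represent exactly, which the commit importer later reports through warn_unusual_mode(). A sketch of the assumed shapes of the two return values (paths, ids and the example mode are hypothetical):

    invdelta = [
        # (old_path, new_path, file_id, new_entry); ie as in the earlier sketch
        (None, u"docs/readme", "readme-fileid", ie),
    ]
    child_modes = {
        "docs/readme": 0100664,   # git-side path -> unusual git file mode
    }
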
  def import_git_commit(repo, mapping, head, lookup_object,
  target_git_object_retriever, trees_cache):
  o = lookup_object(head)
- rev = mapping.import_commit(o,
+ rev, roundtrip_revid, verifiers = mapping.import_commit(o,
  lambda x: target_git_object_retriever.lookup_git_sha(x)[1][0])
  # We have to do this here, since we have to walk the tree and
  # we need to make sure to import the blobs / trees with the right

  base_tree = lookup_object(o.parents[0]).tree
  base_mode = stat.S_IFDIR
  store_updater = target_git_object_retriever._get_updater(rev)
- store_updater.add_object(o, None)
- lookup_file_id = mapping.get_fileid_map(lookup_object, o.tree).lookup_file_id
+ fileid_map = mapping.get_fileid_map(lookup_object, o.tree)
  inv_delta, unusual_modes = import_git_tree(repo.texts,
- mapping, "", u"", (base_tree, o.tree), base_inv,
+ mapping, "", "", (base_tree, o.tree), base_inv,
  None, rev.revision_id, [p.inventory for p in parent_trees],
  lookup_object, (base_mode, stat.S_IFDIR), store_updater,
+ fileid_map.lookup_file_id,
  allow_submodules=getattr(repo._format, "supports_tree_reference", False))
- store_updater.finish()
  if unusual_modes != {}:
  for path, mode in unusual_modes.iteritems():
  warn_unusual_mode(rev.foreign_revid, path, mode)

  basis_id = NULL_REVISION
  rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
- inv_delta, rev.revision_id, rev.parent_ids,
+ inv_delta, rev.revision_id, rev.parent_ids, base_inv)
+ # FIXME: Check verifiers
+ testament = StrictTestament3(rev, inv)
+ calculated_verifiers = { "testament3-sha1": testament.as_sha1() }
+ if roundtrip_revid is not None:
+     original_revid = rev.revision_id
+     rev.revision_id = roundtrip_revid
+     if calculated_verifiers != verifiers:
+         trace.mutter("Testament SHA1 %r for %r did not match %r.",
+             calculated_verifiers["testament3-sha1"],
+             rev.revision_id, verifiers["testament3-sha1"])
+         rev.revision_id = original_revid
+ store_updater.add_object(o, calculated_verifiers, None)
+ store_updater.finish()
  ret_tree = RevisionTree(repo, inv, rev.revision_id)
  trees_cache.add(ret_tree)
  repo.add_revision(rev.revision_id, rev)
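
import_commit() now also recovers the original Bazaar revision id and any stored verifiers from the git commit, and the importer only keeps the round-tripped id when its own testament matches. A simplified restatement of that intent (SHA1 values hypothetical; rev is the Revision being imported):

    calculated_verifiers = {"testament3-sha1": "0123abcd..."}  # from StrictTestament3(rev, inv)
    verifiers = {"testament3-sha1": "0123abcd..."}             # recovered by mapping.import_commit()
    if roundtrip_revid is not None and calculated_verifiers == verifiers:
        # Only then is the commit recorded under its original bzr revision id.
        rev.revision_id = roundtrip_revid
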
  if isinstance(o, Commit):
- rev = mapping.import_commit(o, lambda x: None)
- if repo.has_revision(rev.revision_id):
+ rev, roundtrip_revid, verifiers = mapping.import_commit(o,
+ if (repo.has_revision(rev.revision_id) or
+     (roundtrip_revid and repo.has_revision(roundtrip_revid))):
  graph.append((o.id, o.parents))
  heads.extend([p for p in o.parents if p not in checked])

  """See InterRepository.copy_content."""
  self.fetch(revision_id, pb, find_ghosts=False)
- def fetch(self, revision_id=None, pb=None, find_ghosts=False,
-     mapping=None, fetch_spec=None):
-     self.fetch_refs(revision_id=revision_id, pb=pb,
-         find_ghosts=find_ghosts, mapping=mapping, fetch_spec=fetch_spec)
  class InterGitNonGitRepository(InterGitRepository):
  """Base InterRepository that copies revisions from a Git into a non-Git
- def fetch_refs(self, revision_id=None, pb=None, find_ghosts=False,
+ def fetch_objects(self, determine_wants, mapping, pb=None, limit=None):
+     """Fetch objects from a remote server.
+     :param determine_wants: determine_wants callback
+     :param mapping: BzrGitMapping to use
+     :param pb: Optional progress bar
+     :param limit: Maximum number of commits to import.
+     :return: Tuple with pack hint, last imported revision id and remote refs
+     raise NotImplementedError(self.fetch_objects)
+ def fetch(self, revision_id=None, pb=None, find_ghosts=False,
  mapping=None, fetch_spec=None):
  if mapping is None:
  mapping = self.source.get_mapping()
  interesting_heads = fetch_spec.heads
  interesting_heads = None
  def determine_wants(refs):
  if interesting_heads is None:
  ret = [sha for (ref, sha) in refs.iteritems() if not ref.endswith("^{}")]
  ret = [self.source.lookup_bzr_revision_id(revid)[0] for revid in interesting_heads if revid not in (None, NULL_REVISION)]
  return [rev for rev in ret if not self.target.has_revision(self.source.lookup_foreign_revision_id(rev))]
- (pack_hint, _) = self.fetch_objects(determine_wants, mapping, pb)
+ (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants, mapping, pb)
  if pack_hint is not None and self.target._format.pack_compresses:
  self.target.pack(hint=pack_hint)
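
fetch_objects() is now the extension point: each concrete InterGitNonGitRepository is expected to return a (pack_hint, last_revision_id, remote_refs) tuple, which lets fetch() keep the advertised refs around. A hypothetical minimal subclass, just to illustrate the contract (class name and values invented):

    class ExampleGitNonGitRepository(InterGitNonGitRepository):

        def fetch_objects(self, determine_wants, mapping, pb=None, limit=None):
            """See `InterGitNonGitRepository`."""
            remote_refs = {"refs/heads/master": "0" * 40}   # assumed refs dict
            wants = determine_wants(remote_refs)
            # ... import up to `limit` commits reachable from `wants` into self.target ...
            pack_hint, last_rev = None, None
            return (pack_hint, last_rev, remote_refs)
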
  _GIT_PROGRESS_RE = re.compile(r"(.*?): +(\d+)% \((\d+)/(\d+)\)")

  pb.update(text, 0, 0)
+ class DetermineWantsRecorder(object):
+     def __init__(self, actual):
+         self.remote_refs = {}
+     def __call__(self, refs):
+         self.remote_refs = refs
+         self.wants = self.actual(refs)
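
DetermineWantsRecorder wraps an existing determine_wants callback so that both the computed wants and the full set of advertised refs remain available after the fetch (part of its body, including storing `actual`, is elided above). A usage sketch with invented ref values:

    def fetch_all(refs):
        # Example callback: fetch everything except peeled tag refs.
        return [sha for (ref, sha) in refs.iteritems() if not ref.endswith("^{}")]

    recorder = DetermineWantsRecorder(fetch_all)
    shas = recorder({"refs/heads/master": "f" * 40})   # behaves like fetch_all
    # recorder.wants holds the SHAs handed to the fetch,
    # recorder.remote_refs keeps the full refs dict for the caller.
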
  class InterRemoteGitNonGitRepository(InterGitNonGitRepository):
  """InterRepository that copies revisions from a remote Git into a non-Git

  return set(all_revs) - all_parents
  def fetch_objects(self, determine_wants, mapping, pb=None, limit=None):
+ """See `InterGitNonGitRepository`."""
  def progress(text):
  report_git_progress(pb, text)
  store = BazaarObjectStore(self.target, mapping)
  heads = self.get_target_heads()
  graph_walker = store.get_graph_walker(
  [store._lookup_revision_sha1(head) for head in heads])
- def record_determine_wants(heads):
-     wants = determine_wants(heads)
-     recorded_wants.extend(wants)
+ wants_recorder = DetermineWantsRecorder(determine_wants)
  create_pb = pb = ui.ui_factory.nested_progress_bar()
- objects_iter = self.source.fetch_objects(record_determine_wants,
-     graph_walker, store.get_raw, progress)
- return import_git_objects(self.target, mapping,
-     objects_iter, store, recorded_wants, pb, limit)
+ objects_iter = self.source.fetch_objects(
+     wants_recorder, graph_walker, store.get_raw,
+ (pack_hint, last_rev) = import_git_objects(self.target, mapping,
+     objects_iter, store, wants_recorder.wants, pb, limit)
+ return (pack_hint, last_rev, wants_recorder.remote_refs)
  create_pb.finished()

  def fetch_objects(self, determine_wants, mapping, pb=None, limit=None):
- wants = determine_wants(self.source._git.get_refs())
+ """See `InterGitNonGitRepository`."""
+ remote_refs = self.source._git.get_refs()
+ wants = determine_wants(remote_refs)
  create_pb = pb = ui.ui_factory.nested_progress_bar()
  self.target.lock_write()
- return import_git_objects(self.target, mapping,
+ (pack_hint, last_rev) = import_git_objects(self.target, mapping,
  self.source._git.object_store,
  target_git_object_retriever, wants, pb, limit)
+ return (pack_hint, last_rev, remote_refs)
  self.target.unlock()

  graphwalker = self.target._git.get_graph_walker()
  if (isinstance(self.source, LocalGitRepository) and
  isinstance(self.target, LocalGitRepository)):
- return self.source._git.fetch(self.target._git, determine_wants,
+ refs = self.source._git.fetch(self.target._git, determine_wants,
+ return (None, None, refs)
  elif (isinstance(self.source, LocalGitRepository) and
  isinstance(self.target, RemoteGitRepository)):
  raise NotImplementedError
  isinstance(self.target, LocalGitRepository)):
  f, commit = self.target._git.object_store.add_thin_pack()
- refs = self.source._git.fetch_pack(determine_wants,
-     graphwalker, f.write, progress)
+ refs = self.source.bzrdir.root_transport.fetch_pack(
+     determine_wants, graphwalker, f.write, progress)
+ return (None, None, refs)
  raise AssertionError
- def fetch_refs(self, revision_id=None, pb=None, find_ghosts=False,
+ def fetch(self, revision_id=None, pb=None, find_ghosts=False,
  mapping=None, fetch_spec=None, branches=None):
  if mapping is None:
  mapping = self.source.get_mapping()

  determine_wants = r.object_store.determine_wants_all
  determine_wants = lambda x: [y for y in args if not y in r.object_store]
- return self.fetch_objects(determine_wants, mapping)[0]
+ self.fetch_objects(determine_wants, mapping)

  def is_compatible(source, target):