        ie.text_size = None
        ie.text_sha1 = None
    else:
        ie.text_size = sum(imap(len, blob.chunked))
        ie.text_sha1 = osutils.sha_strings(blob.chunked)
    # Check what revision we should store
    parent_keys = []
    for pinv in parent_invs:
        try:
            pie = pinv[file_id]
        except NoSuchId:
            continue
        if (pie.text_sha1 == ie.text_sha1 and
            pie.executable == ie.executable and
            pie.symlink_target == ie.symlink_target):
            # found a revision in one of the parents to use
            ie.revision = pie.revision
            break
        parent_key = (file_id, pie.revision)
        if parent_key not in parent_keys:
            parent_keys.append(parent_key)
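    # If no parent entry matched exactly, ie.revision stays None and a new
    # text revision keyed on (file_id, revision_id) is stored below, with
    # the collected parent_keys as its per-file parents.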
    if ie.revision is None:
        # Need to store a new revision
        ie.revision = revision_id
        assert ie.revision is not None
        if ie.kind == 'symlink':
            # The link target already lives in ie.symlink_target
            chunks = []
        else:
            chunks = blob.chunked
        texts.insert_record_stream([
            ChunkedContentFactory((file_id, ie.revision),
                tuple(parent_keys), ie.text_sha1, chunks)])
    invdelta = []
    if base_hexsha is not None:
        old_path = path # Renames are not supported yet
        if stat.S_ISDIR(base_mode):
            invdelta.extend(remove_disappeared_children(base_inv, old_path,
                lookup_object(base_hexsha), [], lookup_object))
    else:
        old_path = None
    invdelta.append((old_path, path, file_id, ie))
    if base_hexsha != hexsha:
        store_updater.add_object(blob, ie)
    return invdelta
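
# Each inventory-delta entry built above is a 4-tuple
# (old_path, new_path, file_id, inventory_entry): old_path is None for
# entries that newly appeared, and inventory_entry is None for removals
# (see remove_disappeared_children below).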


class SubmodulesRequireSubtrees(BzrError):
    _fmt = ("The repository you are fetching from contains submodules. "
            "To continue, upgrade your Bazaar repository to a format that "
            "supports nested trees, such as 'development-subtree'.")


def import_git_submodule(texts, mapping, path, name, (base_hexsha, hexsha),
        base_inv, parent_id, revision_id, parent_invs, lookup_object,
        (base_mode, mode), store_updater, lookup_file_id):
    if base_hexsha == hexsha and base_mode == mode:
        # Nothing has changed since the base revision, so we're done
        return [], {}
    file_id = lookup_file_id(path)
    ie = TreeReference(file_id, name.decode("utf-8"), parent_id)
    ie.revision = revision_id
    if base_hexsha is None:
        oldpath = None
    else:
        oldpath = path
    ie.reference_revision = mapping.revision_id_foreign_to_bzr(hexsha)
    texts.insert_record_stream([
        ChunkedContentFactory((file_id, ie.revision), (), None, [])])
    invdelta = [(oldpath, path, file_id, ie)]
    return invdelta, {}
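
# A TreeReference records the submodule as a nested-tree pointer; following
# it requires a repository format with subtree support, which is why
# import_git_tree raises SubmodulesRequireSubtrees for other formats.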


def remove_disappeared_children(base_inv, path, base_tree, existing_children,
        lookup_object):
    """Return an inventory delta removing children that have disappeared."""
    ret = []
    for name, mode, hexsha in base_tree.iteritems():
        if name in existing_children:
            continue
        c_path = posixpath.join(path, name.decode("utf-8"))
        ret.append((c_path, None, base_inv.path2id(c_path), None))
        if stat.S_ISDIR(mode):
            ret.extend(remove_disappeared_children(
                base_inv, c_path, lookup_object(hexsha), [], lookup_object))
    return ret


def import_git_tree(texts, mapping, path, name, (base_hexsha, hexsha),
        base_inv, parent_id, revision_id, parent_invs,
        lookup_object, (base_mode, mode), store_updater,
        lookup_file_id, allow_submodules=False):
    """Import a git tree object into a bzr repository.

    :param texts: VersionedFiles object to add to
    :param base_inv: Base inventory against which to return inventory delta
    :return: Inventory delta for this subtree
    """
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return [], {}
    invdelta = []
    file_id = lookup_file_id(path)
    # We just have to hope this is indeed utf-8:
    ie = InventoryDirectory(file_id, name.decode("utf-8"), parent_id)
    tree = lookup_object(hexsha)
    if base_hexsha is None:
        base_tree = None
        old_path = None # Newly appeared here
    else:
        base_tree = lookup_object(base_hexsha)
        old_path = path # Renames aren't supported yet
    if base_tree is None or type(base_tree) is not Tree:
        ie.revision = revision_id
        invdelta.append((old_path, path, ie.file_id, ie))
        texts.insert_record_stream([
            ChunkedContentFactory((ie.file_id, ie.revision), (), None, [])])
    # Remember for next time
    existing_children = set()
    child_modes = {}
    for child_mode, name, child_hexsha in tree.entries():
        existing_children.add(name)
        child_path = posixpath.join(path, name)
        if type(base_tree) is Tree:
            try:
                child_base_mode, child_base_hexsha = base_tree[name]
            except KeyError:
                child_base_hexsha = None
                child_base_mode = 0
        else:
            child_base_hexsha = None
            child_base_mode = 0
        if stat.S_ISDIR(child_mode):
            subinvdelta, grandchildmodes = import_git_tree(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha), base_inv,
                file_id, revision_id, parent_invs, lookup_object,
                (child_base_mode, child_mode), store_updater, lookup_file_id,
                allow_submodules=allow_submodules)
        elif S_ISGITLINK(child_mode): # submodule
            if not allow_submodules:
                raise SubmodulesRequireSubtrees()
            subinvdelta, grandchildmodes = import_git_submodule(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha), base_inv,
                file_id, revision_id, parent_invs, lookup_object,
                (child_base_mode, child_mode), store_updater, lookup_file_id)
        else:
            subinvdelta = import_git_blob(texts, mapping, child_path, name,
                (child_base_hexsha, child_hexsha), base_inv, file_id,
                revision_id, parent_invs, lookup_object,
                (child_base_mode, child_mode), store_updater, lookup_file_id)
            grandchildmodes = {}
        child_modes.update(grandchildmodes)
        invdelta.extend(subinvdelta)
        if child_mode not in (stat.S_IFDIR, DEFAULT_FILE_MODE,
                              stat.S_IFLNK, DEFAULT_FILE_MODE|0111):
            child_modes[child_path] = child_mode
    # Remove any children that have disappeared
    if base_tree is not None and type(base_tree) is Tree:
        invdelta.extend(remove_disappeared_children(base_inv, old_path,
            base_tree, existing_children, lookup_object))
    store_updater.add_object(tree, ie)
    return invdelta, child_modes
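
# Paths whose git modes cannot be represented natively in bzr (anything
# other than a plain file, executable file, symlink or directory) are
# collected in child_modes so that import_git_commit can record them as
# "unusual" file modes on the revision.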


def verify_commit_reconstruction(target_git_object_retriever, lookup_object,
        o, rev, ret_tree, parent_trees, mapping, unusual_modes):
    new_unusual_modes = mapping.export_unusual_file_modes(rev)
    if new_unusual_modes != unusual_modes:
        raise AssertionError("unusual modes don't match: %r != %r" % (
            unusual_modes, new_unusual_modes))
    # Verify that we can reconstruct the commit properly
    rec_o = target_git_object_retriever._reconstruct_commit(rev, o.tree)
    if rec_o != o:
        raise AssertionError("Reconstructed commit differs: %r != %r" % (
            o, rec_o))
    diff = []
    new_objs = {}
    for path, obj, ie in _tree_to_objects(ret_tree, parent_trees,
            target_git_object_retriever._cache.idmap, unusual_modes,
            mapping.BZR_DUMMY_FILE):
        old_obj_id = tree_lookup_path(lookup_object, o.tree, path)[1]
        new_objs[path] = obj
        if obj.id != old_obj_id:
            diff.append((path, lookup_object(old_obj_id), obj))
    for (path, old_obj, new_obj) in diff:
        while (old_obj.type_name == "tree" and
               new_obj.type_name == "tree" and
               sorted(old_obj) == sorted(new_obj)):
            for name in old_obj:
                if old_obj[name][0] != new_obj[name][0]:
                    raise AssertionError("Modes for %s differ: %o != %o" %
                        (path, old_obj[name][0], new_obj[name][0]))
                if old_obj[name][1] != new_obj[name][1]:
                    # Found a differing child, delve deeper
                    path = posixpath.join(path, name)
                    old_obj = lookup_object(old_obj[name][1])
                    new_obj = new_objs[path]
                    break
        raise AssertionError("objects differ for %s: %r != %r" % (path,
            old_obj, new_obj))


def import_git_commit(repo, mapping, head, lookup_object,
        target_git_object_retriever, trees_cache):
    o = lookup_object(head)
    rev = mapping.import_commit(o)
    # We have to do this here, since we have to walk the tree and
    # we need to make sure to import the blobs / trees with the right
    # path; this may involve adding them more than once.
    parent_trees = trees_cache.revision_trees(rev.parent_ids)
    if parent_trees == []:
        base_inv = Inventory(root_id=None)
        base_tree = None
        base_mode = None
    else:
        base_inv = parent_trees[0].inventory
        base_tree = lookup_object(o.parents[0]).tree
        base_mode = stat.S_IFDIR
    store_updater = target_git_object_retriever._get_updater(rev)
    store_updater.add_object(o, None)
    lookup_file_id = mapping.get_fileid_map(lookup_object,
        o.tree).lookup_file_id
    inv_delta, unusual_modes = import_git_tree(repo.texts,
        mapping, "", u"", (base_tree, o.tree), base_inv,
        None, rev.revision_id, [p.inventory for p in parent_trees],
        lookup_object, (base_mode, stat.S_IFDIR), store_updater,
        lookup_file_id,
        allow_submodules=getattr(repo._format, "supports_tree_reference",
            False))
    store_updater.finish()
    if unusual_modes != {}:
        for path, mode in unusual_modes.iteritems():
            warn_unusual_mode(rev.foreign_revid, path, mode)
        mapping.import_unusual_file_modes(rev, unusual_modes)
    try:
        basis_id = rev.parent_ids[0]
    except IndexError:
        basis_id = NULL_REVISION
    rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
        inv_delta, rev.revision_id, rev.parent_ids)
    ret_tree = RevisionTree(repo, inv, rev.revision_id)
    trees_cache.add(ret_tree)
    repo.add_revision(rev.revision_id, rev)
    if "verify" in debug.debug_flags:
        verify_commit_reconstruction(target_git_object_retriever,
            lookup_object, o, rev, ret_tree, parent_trees, mapping,
            unusual_modes)
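
# The reconstruction check above only runs when bzr's "verify" debug flag
# is enabled (e.g. bzr -Dverify): it re-exports every imported revision
# back to git objects, which is expensive.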


def import_git_objects(repo, mapping, object_iter,
        target_git_object_retriever, heads, pb=None, limit=None):
    """Import a set of git objects into a bzr repository.

    :param repo: Target Bazaar repository
    :param mapping: Mapping to use
    :param object_iter: Iterator over Git objects.
    :return: Tuple with pack hints and last imported revision id
    """
    def lookup_object(sha):
        try:
            return object_iter[sha]
        except KeyError:
            return target_git_object_retriever[sha]
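    # Objects missing from the incoming pack are looked up in the target's
    # object store instead; they may already be present from an earlier
    # fetch.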
    graph = []
    checked = set()
    heads = list(set(heads))
    trees_cache = LRUTreeCache(repo)
    # Find and convert commit objects
    while heads:
        if pb is not None:
            pb.update("finding revisions to fetch", len(graph), None)
        head = heads.pop()
        try:
            o = lookup_object(head)
        except KeyError:
            continue
        if isinstance(o, Commit):
            rev = mapping.import_commit(o)
            if repo.has_revision(rev.revision_id):
                continue
            squash_revision(repo, rev)
            graph.append((o.id, o.parents))
            heads.extend([p for p in o.parents if p not in checked])
        elif isinstance(o, Tag):
            if o.object[1] not in checked:
                heads.append(o.object[1])
        else:
            trace.warning("Unable to import head object %r" % o)
        checked.add(o.id)
    # Order the revisions
    # Create the inventory objects
    batch_size = 1000 # assumption: the original batch size is not shown here
    revision_ids = topo_sort(graph)
    pack_hints = []
    if limit is not None:
        revision_ids = revision_ids[:limit]
    last_imported = None
    for offset in range(0, len(revision_ids), batch_size):
        target_git_object_retriever.start_write_group()
        try:
            repo.start_write_group()
            try:
                for i, head in enumerate(
                        revision_ids[offset:offset+batch_size]):
                    if pb is not None:
                        pb.update("fetching revisions", offset+i,
                            len(revision_ids))
                    import_git_commit(repo, mapping, head, lookup_object,
                        target_git_object_retriever, trees_cache)
                    last_imported = head
            except:
                repo.abort_write_group()
                raise
            else:
                hint = repo.commit_write_group()
                if hint is not None:
                    pack_hints.extend(hint)
        except:
            target_git_object_retriever.abort_write_group()
            raise
        else:
            target_git_object_retriever.commit_write_group()
    return pack_hints, last_imported
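
# Callers are expected to hand the returned pack hints to
# Repository.pack(hint=...), as InterGitRepository.fetch does below when
# the target format supports pack compression.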


class InterGitRepository(InterRepository):

            ret = [mapping.revision_id_bzr_to_foreign(revid)[0]
                   for revid in interesting_heads
                   if revid not in (None, NULL_REVISION)]
            return [rev for rev in ret if not self.target.has_revision(
                mapping.revision_id_foreign_to_bzr(rev))]
        (pack_hint, _) = self.fetch_objects(determine_wants, mapping, pb)
        if pack_hint is not None and self.target._format.pack_compresses:
            self.target.pack(hint=pack_hint)
        if interesting_heads is not None:
            present_interesting_heads = self.target.has_revisions(
                interesting_heads)
            missing_interesting_heads = (set(interesting_heads) -
                present_interesting_heads)
            if missing_interesting_heads:
                raise AssertionError("Missing interesting heads: %r" %
                    missing_interesting_heads)
        return self._refs


_GIT_PROGRESS_RE = re.compile(r"(.*?): +(\d+)% \((\d+)/(\d+)\)")


def report_git_progress(pb, text):
    text = text.rstrip("\r\n")
    g = _GIT_PROGRESS_RE.match(text)
    if g is not None:
        (text, pct, current, total) = g.groups()
        pb.update(text, int(current), int(total))
    else:
        pb.update(text, 0, 0)
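
# git's sideband progress lines look like "Counting objects:  90% (9/10)";
# the regexp feeds the phase name and the (current, total) counters straight
# into the progress bar, and anything unmatched is shown as-is.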


class InterRemoteGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a remote Git into a non-Git
    repository.
    """

    def get_target_heads(self):
        # FIXME: This should be more efficient
        all_revs = self.target.all_revision_ids()
        parent_map = self.target.get_parent_map(all_revs)
        all_parents = set()
        map(all_parents.update, parent_map.itervalues())
        return set(all_revs) - all_parents

    def fetch_objects(self, determine_wants, mapping, pb=None, limit=None):
        def progress(text):
            report_git_progress(pb, text)
        store = BazaarObjectStore(self.target, mapping)
        self.target.lock_write()
        try:
            heads = self.get_target_heads()
            graph_walker = store.get_graph_walker(
                [store._lookup_revision_sha1(head) for head in heads])
            recorded_wants = []