import stat

from bzrlib import (
    debug,
    osutils,
    ui,
    urlutils,
    )
from bzrlib.errors import (
    InvalidRevisionId,
    )
from bzrlib.inventory import (
    Inventory,
    InventoryDirectory,
    InventoryFile,
    InventoryLink,
    )
from bzrlib.repository import (
    InterRepository,
    )
from bzrlib.revision import (
    NULL_REVISION,
    )
from bzrlib.tsort import (
    topo_sort,
    )
from bzrlib.versionedfile import (
    FulltextContentFactory,
    )

from bzrlib.plugins.git.mapping import (
    DEFAULT_FILE_MODE,
    inventory_to_tree_and_blobs,
    mode_is_executable,
    warn_unusual_mode,
    )
from bzrlib.plugins.git.object_store import (
    BazaarObjectStore,
    )
from bzrlib.plugins.git.remote import (
    RemoteGitRepository,
    )
from bzrlib.plugins.git.repository import (
    GitRepository,
    GitRepositoryFormat,
    )

# Tree and submodule helpers from dulwich (assumed import locations).
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    S_ISGITLINK,
    )

class BzrFetchGraphWalker(object):
    """GraphWalker implementation that uses a Bazaar repository."""

    def __init__(self, repository, mapping):
        self.repository = repository
        self.mapping = mapping
        self.done = set()
        self.heads = set(repository.all_revision_ids())
        self.parents = {}

    def __iter__(self):
        return iter(self.next, None)

    def ack(self, sha):
        # The remote side already has this object; mark the corresponding
        # bzr revision (and its ancestry) as done.
        revid = self.mapping.revision_id_foreign_to_bzr(sha)
        self.remove(revid)

    def remove(self, revid):
        self.done.add(revid)
        if revid in self.heads:
            self.heads.remove(revid)
        if revid in self.parents:
            for p in self.parents[revid]:
                self.remove(p)

    def next(self):
        while self.heads:
            ret = self.heads.pop()
            ps = self.repository.get_parent_map([ret])[ret]
            self.parents[ret] = ps
            self.heads.update([p for p in ps if not p in self.done])
            self.done.add(ret)
            try:
                return self.mapping.revision_id_bzr_to_foreign(ret)[0]
            except InvalidRevisionId:
                pass
        return None
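
# Usage sketch (illustrative only; assumes `repo` is a Bazaar repository and
# `mapping` a bzr-git mapping): the git fetch protocol pulls SHAs from the
# walker as "have"s and acks the ones the remote side already knows about.
#
#   walker = BzrFetchGraphWalker(repo, mapping)
#   for sha in iter(walker.next, None):
#       ...  # send sha as a "have"; on acknowledgement, call walker.ack(sha)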

def import_git_blob(texts, mapping, path, hexsha, base_inv, parent_id,
        revision_id, parent_invs, shagitmap, lookup_object, executable,
        symlink):
    """Import a git blob object into a bzr repository.

    :param texts: VersionedFiles to add the text to
    :param path: Path of the blob in the tree
    :param hexsha: SHA of the git blob
    :return: Tuple with inventory delta and sha map entries
    """
    file_id = mapping.generate_file_id(path)
    if symlink:
        cls = InventoryLink
    else:
        cls = InventoryFile
    # We just have to hope this is indeed utf-8:
    ie = cls(file_id, urlutils.basename(path).decode("utf-8"), parent_id)
    ie.executable = executable
    # See if this has changed at all
    if file_id in base_inv:
        base_ie = base_inv[file_id]
        base_sha = shagitmap.lookup_blob(file_id, base_ie.revision)
        if (base_sha == hexsha and base_ie.executable == ie.executable
            and base_ie.kind == ie.kind):
            # If nothing has changed since the base revision, we're done
            return [], []
        if base_sha == hexsha and base_ie.kind == ie.kind:
            ie.text_size = base_ie.text_size
            ie.text_sha1 = base_ie.text_sha1
            ie.symlink_target = base_ie.symlink_target
            if ie.executable == base_ie.executable:
                ie.revision = base_ie.revision
    else:
        base_ie = None
    blob = lookup_object(hexsha)
    if ie.kind == "symlink":
        ie.symlink_target = blob.data
        ie.text_size = None
        ie.text_sha1 = None
    else:
        ie.text_size = len(blob.data)
        ie.text_sha1 = osutils.sha_string(blob.data)
    # Check what revision we should store
    parent_keys = []
    for pinv in parent_invs:
        if not file_id in pinv:
            continue
        if pinv.revision_id == base_inv.revision_id:
            pie = base_ie
        else:
            pie = pinv[file_id]
        if (pie.text_sha1 == ie.text_sha1 and pie.executable == ie.executable
            and pie.symlink_target == ie.symlink_target):
            # found a revision in one of the parents to use
            ie.revision = pie.revision
            break
        parent_keys.append((file_id, pie.revision))
    if ie.revision is None:
        # Need to store a new revision
        ie.revision = revision_id
        assert file_id is not None
        assert ie.revision is not None
        texts.insert_record_stream([FulltextContentFactory(
            (file_id, ie.revision), tuple(parent_keys), ie.text_sha1,
            blob.data)])
    shamap = [(hexsha, "blob", (ie.file_id, ie.revision))]
    if file_id in base_inv:
        old_path = base_inv.id2path(file_id)
    else:
        old_path = None
    invdelta = [(old_path, path, file_id, ie)]
    invdelta.extend(remove_disappeared_children(base_inv, base_ie, []))
    return (invdelta, shamap)

def import_git_submodule(texts, mapping, path, hexsha, base_inv, parent_id,
        revision_id, parent_invs, shagitmap, lookup_object):
    raise NotImplementedError(import_git_submodule)

def remove_disappeared_children(base_inv, base_ie, existing_children):
    """Return inventory-delta entries removing children that are gone.

    :param base_ie: Base inventory entry for the containing directory
    :param existing_children: Names of children that are still present
    """
    if base_ie is None or base_ie.kind != 'directory':
        return []
    ret = []
    deletable = [v for k, v in base_ie.children.iteritems()
                 if k not in existing_children]
    while deletable:
        ie = deletable.pop()
        ret.append((base_inv.id2path(ie.file_id), None, ie.file_id, None))
        if ie.kind == "directory":
            deletable.extend(ie.children.values())
    return ret
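
# Illustration (hypothetical ids and paths): inventory-delta entries follow
# the bzrlib convention (old_path, new_path, file_id, new_entry), so a
# removal is recorded as (old_path, None, file_id, None):
#
#   remove_disappeared_children(base_inv, base_inv[dir_file_id], set(["kept"]))
#   # -> [("dir/gone", None, "gone-file-id", None), ...]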

def import_git_tree(texts, mapping, path, hexsha, base_inv, parent_id,
        revision_id, parent_invs, shagitmap, lookup_object):
    """Import a git tree object into a bzr repository.

    :param texts: VersionedFiles object to add to
    :param path: Path of the tree in the target tree
    :param hexsha: SHA of the git tree
    :param base_inv: Base inventory against which to return inventory delta
    :return: Inventory delta for this subtree, unusual file modes and sha
        map entries
    """
    invdelta = []
    file_id = mapping.generate_file_id(path)
    # We just have to hope this is indeed utf-8:
    ie = InventoryDirectory(file_id, urlutils.basename(path).decode("utf-8"),
        parent_id)
    if not file_id in base_inv:
        base_ie = None
    else:
        base_ie = base_inv[file_id]
    if base_ie is None:
        # Newly appeared here
        ie.revision = revision_id
        texts.add_lines((file_id, ie.revision), (), [])
        invdelta.append((None, path, file_id, ie))
    else:
        # See if this has changed at all
        base_sha = shagitmap.lookup_tree(file_id, base_inv.revision_id)
        if base_sha == hexsha:
            # If nothing has changed since the base revision, we're done
            return [], {}, []
        if base_ie.kind != "directory":
            # Used to be something else; replace it with a directory
            ie.revision = revision_id
            texts.add_lines((ie.file_id, ie.revision), (), [])
            invdelta.append((base_inv.id2path(ie.file_id), path, ie.file_id,
                ie))
    # Remember for next time
    existing_children = set()
    child_modes = {}
    shamap = []
    tree = lookup_object(hexsha)
    for mode, name, child_hexsha in tree.entries():
        basename = name.decode("utf-8")
        existing_children.add(basename)
        child_path = osutils.pathjoin(path, name)
        if stat.S_ISDIR(mode):
            subinvdelta, grandchildmodes, subshamap = import_git_tree(
                texts, mapping, child_path, child_hexsha, base_inv,
                file_id, revision_id, parent_invs, shagitmap, lookup_object)
            invdelta.extend(subinvdelta)
            child_modes.update(grandchildmodes)
            shamap.extend(subshamap)
        elif S_ISGITLINK(mode): # submodule
            subinvdelta, grandchildmodes, subshamap = import_git_submodule(
                texts, mapping, child_path, child_hexsha, base_inv,
                file_id, revision_id, parent_invs, shagitmap, lookup_object)
            invdelta.extend(subinvdelta)
            child_modes.update(grandchildmodes)
            shamap.extend(subshamap)
        else:
            subinvdelta, subshamap = import_git_blob(texts, mapping,
                child_path, child_hexsha, base_inv, file_id, revision_id,
                parent_invs, shagitmap, lookup_object,
                mode_is_executable(mode), stat.S_ISLNK(mode))
            invdelta.extend(subinvdelta)
            shamap.extend(subshamap)
        if mode not in (stat.S_IFDIR, DEFAULT_FILE_MODE,
                        stat.S_IFLNK, DEFAULT_FILE_MODE|0111):
            child_modes[child_path] = mode
    # Remove any children that have disappeared
    invdelta.extend(remove_disappeared_children(base_inv, base_ie,
        existing_children))
    shamap.append((hexsha, "tree", (file_id, revision_id)))
    return invdelta, child_modes, shamap
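
# Note on the returned modes (descriptive): `child_modes` maps tree paths to
# raw git file modes that bzr cannot represent directly (e.g. a hypothetical
# {"bin/tool": 0100775}); import_git_objects() below warns about them and
# hands them to mapping.import_unusual_file_modes() so they can round-trip.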

def import_git_objects(repo, mapping, object_iter, target_git_object_retriever,
        heads, pb=None):
    """Import a set of git objects into a bzr repository.

    :param repo: Target Bazaar repository
    :param mapping: Mapping to use
    :param object_iter: Iterator over Git objects.
    """
    def lookup_object(sha):
        try:
            return object_iter[sha]
        except KeyError:
            return target_git_object_retriever[sha]
    # TODO: a more (memory-)efficient implementation of this
    root_trees = {}
    revisions = {}
    graph = []
    parent_invs_cache = {}
    # ... (elided: the fetched git commits are converted into bzr Revision
    # objects here, populating root_trees, revisions and graph) ...
    for offset, revid in enumerate(topo_sort(graph)):
        if pb is not None:
            pb.update("importing revisions", offset, len(graph))
        rev = revisions[revid]
        parent_invs = []
        for parent_id in rev.parent_ids:
            try:
                parent_invs.append(parent_invs_cache[parent_id])
            except KeyError:
                parent_invs.append(repo.get_inventory(parent_id))
        if parent_invs == []:
            base_inv = Inventory(root_id=None)
        else:
            base_inv = parent_invs[0]
        inv_delta, unusual_modes, shamap = import_git_tree(repo.texts,
            mapping, "", root_trees[revid], base_inv, None, revid,
            parent_invs, target_git_object_retriever._idmap, lookup_object)
        target_git_object_retriever._idmap.add_entries(shamap)
        if unusual_modes != {}:
            for path, mode in unusual_modes.iteritems():
                warn_unusual_mode(rev.foreign_revid, path, mode)
            mapping.import_unusual_file_modes(rev, unusual_modes)
        try:
            basis_id = rev.parent_ids[0]
        except IndexError:
            basis_id = NULL_REVISION
        inv = repo.add_inventory_by_delta(basis_id,
            inv_delta, rev.revision_id, rev.parent_ids)
        parent_invs_cache[rev.revision_id] = inv
        repo.add_revision(rev.revision_id, rev)
        if "verify" in debug.debug_flags:
            new_unusual_modes = mapping.export_unusual_file_modes(rev)
            if new_unusual_modes != unusual_modes:
                raise AssertionError("unusual modes don't match: %r != %r" % (
                    unusual_modes, new_unusual_modes))
            objs = inventory_to_tree_and_blobs(inv, repo.texts, mapping,
                unusual_modes)
            for sha1, newobj, path in objs:
                assert path is not None
                oldobj = tree_lookup_path(lookup_object, root_trees[revid],
                    path)
                if oldobj != newobj:
                    raise AssertionError("%r != %r in %s" % (
                        oldobj, newobj, path))
    target_git_object_retriever._idmap.commit()
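
# Note (descriptive): sha-map entries for blobs and trees are collected per
# revision, pushed in batches via _idmap.add_entries(), and only persisted by
# the final _idmap.commit() call above.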

class InterGitRepository(InterRepository):
    """InterRepository that copies revisions from a Git repository into a
    non-Git repository."""

    _matching_repo_format = GitRepositoryFormat()

    def fetch_objects(self, determine_wants, mapping, pb=None):
        def progress(text):
            pb.update("git: %s" % text.rstrip("\r\n"), 0, 0)
        store = BazaarObjectStore(self.target, mapping)
        self.target.lock_write()
        try:
            heads = self.target.get_graph().heads(
                self.target.all_revision_ids())
            graph_walker = store.get_graph_walker(
                [store._lookup_revision_sha1(head) for head in heads])
            recorded_wants = []

            def record_determine_wants(heads):
                wants = determine_wants(heads)
                recorded_wants.extend(wants)
                return wants

            create_pb = None
            if pb is None:
                create_pb = pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.start_write_group()
                try:
                    objects_iter = self.source.fetch_objects(
                        record_determine_wants, graph_walker,
                        store.get_raw, progress)
                    import_git_objects(self.target, mapping, objects_iter,
                        store, recorded_wants, pb)
                finally:
                    self.target.commit_write_group()
            finally:
                if create_pb is not None:
                    create_pb.finished()
        finally:
            self.target.unlock()
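
    # Usage sketch (illustrative; `interrepo` and the ref name are
    # hypothetical): determine_wants receives the remote refs dict and
    # returns the commit SHAs to fetch.
    #
    #   def determine_wants(refs):
    #       return [refs["refs/heads/master"]]
    #
    #   interrepo.fetch_objects(determine_wants, mapping)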

    @staticmethod
    def is_compatible(source, target):