        self._cache = lru_cache.LRUSizeCache(max_size=MAX_TREE_CACHE_SIZE,
            after_cleanup_size=None, compute_size=approx_tree_size)

    def revision_tree(self, revid):
        try:
            tree = self._cache[revid]
        except KeyError:
            tree = self.repository.revision_tree(revid)
            self.add(tree)
        assert tree.get_revision_id() == tree.inventory.revision_id
        return tree

    def iter_revision_trees(self, revids):
        trees = {}
        todo = []
        for revid in revids:
            try:
                tree = self._cache[revid]
            except KeyError:
                todo.append(revid)
            else:
                assert tree.get_revision_id() == revid
                assert tree.inventory.revision_id == revid
                trees[revid] = tree
        for tree in self.repository.revision_trees(todo):
            trees[tree.get_revision_id()] = tree
            self.add(tree)
        return (trees[r] for r in revids)

    def revision_trees(self, revids):
        return list(self.iter_revision_trees(revids))

    def add(self, tree):
        self._cache.add(tree.get_revision_id(), tree)

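# A minimal sketch of the size-bounded caching used above (bzrlib's
# lru_cache API; the len-based compute_size is illustrative, the real cache
# sizes trees with approx_tree_size):
#
#   from bzrlib import lru_cache
#   cache = lru_cache.LRUSizeCache(max_size=1024 * 1024,
#       after_cleanup_size=None, compute_size=len)
#   cache.add("some-revid", "x" * 4096)  # evicts LRU entries past ~1MB
#   assert cache["some-revid"] == "x" * 4096
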
def _find_missing_bzr_revids(get_parent_map, want, have):
    """Find the revisions that have to be pushed.

    :param get_parent_map: Function that returns the parents for a sequence
        of revision ids.
    :param want: Revisions the target wants
    :param have: Revisions the target already has
    :return: Set of revisions to fetch
    """
    processed = set()
    todo = set()
    pending = want - have
    while pending:
        processed.update(pending)
        next_map = get_parent_map(pending)
        next_pending = set()
        for item in next_map.iteritems():
            if item[0] in have:
                continue
            todo.add(item[0])
            next_pending.update(p for p in item[1] if p not in processed)
        pending = next_pending
    if NULL_REVISION in todo:
        todo.remove(NULL_REVISION)
    return todo

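# Usage sketch (hypothetical revision ids): the walk follows parents from
# the wanted revisions, stops at already-processed ones, and never returns
# NULL_REVISION.
#
#   parents = {"rev-c": ("rev-b",), "rev-b": (NULL_REVISION,)}
#   todo = _find_missing_bzr_revids(
#       lambda revids: dict((r, parents[r]) for r in revids if r in parents),
#       want=set(["rev-c"]), have=set())
#   # todo == set(["rev-c", "rev-b"])
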
def _check_expected_sha(expected_sha, object):
    """Check whether an object matches an expected SHA.

    :param expected_sha: None or expected SHA, as either binary or hex digest
    :param object: Git object to verify
    """
def _tree_to_objects(tree, parent_trees, idmap, unusual_modes,
        dummy_file_name=None):
    """Iterate over the objects that were introduced in a revision.

    :param tree: Revision tree to export
    :param idmap: id map
    :param parent_trees: Parent revision trees
    :param unusual_modes: Unusual file modes dictionary
    :param dummy_file_name: File name to use for dummy files
        in empty directories. None to skip empty directories
    :return: Yields (path, object, ie) entries
    """
    def find_unchanged_parent_ie(ie, parent_trees):
        for ptree in parent_trees:
            try:
                pie = ptree.inventory[ie.file_id]
            except errors.NoSuchId:
                pass
            else:
                if (pie.text_sha1 == ie.text_sha1 and
                    pie.symlink_target == ie.symlink_target):
                    return pie
        raise KeyError(ie.file_id)

    if parent_trees == []:
        base_tree = tree._repository.revision_tree(NULL_REVISION)
        other_parent_trees = []
    else:
        base_tree = parent_trees[0]
        other_parent_trees = parent_trees[1:]

    new_blobs = []
    new_trees = {}
    shamap = {}

    # Find all the changed blobs
    for (file_id, path, changed_content, versioned, parent, name, kind,
         executable) in tree.iter_changes(base_tree):
        if kind[1] == "file":
            ie = tree.inventory[file_id]
            if changed_content:
                try:
                    pie = find_unchanged_parent_ie(ie, other_parent_trees)
                except KeyError:
                    pass
                else:
                    try:
                        shamap[ie.file_id] = idmap.lookup_blob_id(
                            pie.file_id, pie.revision)
                    except KeyError:
                        # Not in the id map yet; compute the blob id
                        # directly from the (unchanged) text.
                        blob = Blob()
                        blob.data = tree.get_file_text(ie.file_id)
                        shamap[ie.file_id] = blob.id
            if not file_id in shamap:
                new_blobs.append((path[1], ie))
                new_trees[posixpath.dirname(path[1])] = parent[1]
        elif kind[1] == "symlink":
            ie = tree.inventory[file_id]
            if changed_content:
                blob = symlink_to_blob(ie)
                try:
                    find_unchanged_parent_ie(ie, other_parent_trees)
                except KeyError:
                    yield path[1], blob, ie
            new_trees[posixpath.dirname(path[1])] = parent[1]
        elif kind[1] not in (None, "directory"):
            raise AssertionError(kind[1])
        if (path[0] not in (None, "") and
            parent[0] in tree.inventory and
            tree.inventory[parent[0]].kind == "directory"):
            # A removal; the old parent directory needs its tree rewritten
            new_trees[posixpath.dirname(path[0])] = parent[0]

    # Fetch contents of the blobs that were changed
    for (path, ie), chunks in tree.iter_files_bytes(
        [(ie.file_id, (path, ie)) for (path, ie) in new_blobs]):
        obj = Blob()
        obj.chunked = chunks
        yield path, obj, ie
        shamap[ie.file_id] = obj.id

    for path in unusual_modes:
        parent_path = posixpath.dirname(path)
        new_trees[parent_path] = tree.path2id(parent_path)

    # Rewrite the trees that contain changed entries, walking up towards
    # the root
    items = new_trees.items()
    for path, file_id in items:
        try:
            parent_id = tree.inventory[file_id].parent_id
        except errors.NoSuchId:
            # Directory was removed recursively perhaps ?
            continue
        if parent_id is not None:
            parent_path = urlutils.dirname(path)
            new_trees[parent_path] = parent_id

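    # For reference, a dulwich Blob computes its id as the git object sha
    # of its content, which is what the shamap above records (dulwich API,
    # illustrative value):
    #
    #   from dulwich.objects import Blob
    #   blob = Blob()
    #   blob.data = "hello\n"
    #   blob.id  # 40-char hex sha, what "git hash-object" would print
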
class BazaarObjectStore(BaseObjectStore):
    """A Git-style object store backed onto a Bazaar repository."""

    def __iter__(self):
        self._update_sha_map()
        return iter(self._cache.idmap.sha1s())

    def _reconstruct_commit(self, rev, tree_sha, roundtrip, verifiers):
        """Reconstruct a Commit object.

        :param rev: Revision object
        :param tree_sha: SHA1 of the root tree object
        :param roundtrip: Whether or not to roundtrip bzr metadata
        :param verifiers: Verifiers for the commit
        :return: Commit object
        """
        def parent_lookup(revid):
            try:
                return self._lookup_revision_sha1(revid)
            except errors.NoSuchRevision:
                trace.warning("Ignoring ghost parent %s", revid)
                return None
        return self.mapping.export_commit(rev, tree_sha, parent_lookup,
            roundtrip, verifiers)

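    # Note: export_commit builds the git Commit with tree set to tree_sha
    # and parents taken from parent_lookup; a None result (ghost parent,
    # warned about above) is expected to be skipped by the mapping.
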
    def _create_fileid_map_blob(self, inv):
        # FIXME: This can probably be a lot more efficient,
        # not all files necessarily have to be processed.
        file_ids = {}
        for (path, ie) in inv.iter_entries():
            if self.mapping.generate_file_id(path) != ie.file_id:
                file_ids[path] = ie.file_id
        return self.mapping.export_fileid_map(file_ids)

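    # Only file ids that differ from the default the mapping would generate
    # for the path are recorded, so a tree that was originally imported
    # from git typically contributes an empty map.
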
    def _revision_to_objects(self, rev, tree, roundtrip):
        """Convert a revision to a set of git objects.

        :param rev: Bazaar revision object
        :param tree: Bazaar revision tree
        :param roundtrip: Whether to roundtrip all Bazaar revision data
        """
        unusual_modes = extract_unusual_modes(rev)
        present_parents = self.repository.has_revisions(rev.parent_ids)
        parent_trees = self.tree_cache.revision_trees(
            [p for p in rev.parent_ids if p in present_parents])
        root_tree = None
        for path, obj, ie in _tree_to_objects(tree, parent_trees,
                self._cache.idmap, unusual_modes, self.mapping.BZR_DUMMY_FILE):
            if path == "":
                root_tree = obj
                root_ie = ie
                # Don't yield just yet
            else:
                yield path, obj, ie
        if root_tree is None:
            # Pointless commit - get the tree sha elsewhere
            if not rev.parent_ids:
                root_tree = Tree()
            else:
                base_sha1 = self._lookup_revision_sha1(rev.parent_ids[0])
                root_tree = self[self[base_sha1].tree]
            root_ie = tree.inventory.root
        if roundtrip and self.mapping.BZR_FILE_IDS_FILE is not None:
            b = self._create_fileid_map_blob(tree.inventory)
            if b is not None:
                root_tree[self.mapping.BZR_FILE_IDS_FILE] = (
                    (stat.S_IFREG | 0644), b.id)
                yield self.mapping.BZR_FILE_IDS_FILE, b, None
        yield "", root_tree, root_ie
        if roundtrip:
            testament3 = StrictTestament3(rev, tree.inventory)
            verifiers = { "testament3-sha1": testament3.as_sha1() }
        else:
            verifiers = {}
        commit_obj = self._reconstruct_commit(rev, root_tree.id,
            roundtrip=roundtrip, verifiers=verifiers)
        try:
            foreign_revid, mapping = mapping_registry.parse_revision_id(
                rev.revision_id)
        except errors.InvalidRevisionId:
            pass
        else:
            _check_expected_sha(foreign_revid, commit_obj)
        yield None, commit_obj, None

    def _update_sha_map_revision(self, revid):
        rev = self.repository.get_revision(revid)
        tree = self.tree_cache.revision_tree(rev.revision_id)
        updater = self._get_updater(rev)
        for path, obj, ie in self._revision_to_objects(rev, tree,
                roundtrip=True):
            if isinstance(obj, Commit):
                # For commits the updater receives the verifiers dict in
                # place of an inventory entry.
                testament3 = StrictTestament3(rev, tree.inventory)
                ie = { "testament3-sha1": testament3.as_sha1() }
            updater.add_object(obj, ie, path)
        commit_obj = updater.finish()
        return commit_obj.id

                except KeyError:
                    # The blob was never converted; reconstruct it to
                    # obtain its sha
                    return self._reconstruct_blobs(
                        [(entry.file_id, entry.revision, None)]).next().id
            else:
                raise AssertionError("unknown entry kind '%s'" % entry.kind)
        tree = directory_to_tree(inv[fileid], get_ie_sha1, unusual_modes,
            self.mapping.BZR_DUMMY_FILE)
        if (inv.root.file_id == fileid and
            self.mapping.BZR_FILE_IDS_FILE is not None):
            # If this is the root tree, add the file ids
            b = self._create_fileid_map_blob(inv)
            tree[self.mapping.BZR_FILE_IDS_FILE] = (
                (stat.S_IFREG | 0644), b.id)
        _check_expected_sha(expected_sha, tree)
        return tree

    def _lookup_revision_sha1(self, revid):
        """Return the SHA1 matching a Bazaar revision."""
        from dulwich.protocol import ZERO_SHA
        if revid == NULL_REVISION:
            return ZERO_SHA
        try:
            return self._cache.idmap.lookup_commit(revid)
        except KeyError:
            try:
                return mapping_registry.parse_revision_id(revid)[0]
            except errors.InvalidRevisionId:
                self.repository.lock_read()
                try:
                    self._update_sha_map(revid)
                finally:
                    self.repository.unlock()
                return self._cache.idmap.lookup_commit(revid)

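    # Sketch of the shortcut above (revision id format assumed from the
    # bzr-git mapping): mapped ids such as "git-v1:<hexsha>" embed the git
    # commit sha, so parse_revision_id can return it without a map lookup.
    #
    #   mapping_registry.parse_revision_id("git-v1:" + "a" * 40)
    #   # -> ("a" * 40, <mapping>)
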
    def get_raw(self, sha):
        """Get the raw representation of a Git object by SHA1.

        :param sha: SHA1 of the git object
        """
        obj = self[sha]
        return (obj.type, obj.as_raw_string())

    def lookup_git_shas(self, shas, update_map=True):
        from dulwich.protocol import ZERO_SHA
        ret = {}
        for sha in shas:
            if sha == ZERO_SHA:
                ret[sha] = ("commit", (NULL_REVISION, None, {}))
                continue
            # See if sha is already in the map
            try:
                ret[sha] = self._cache.idmap.lookup_git_sha(sha)
            except KeyError:
                if not update_map:
                    continue
                # if not, see if there are any unconverted revisions and add
                # them to the map, search for sha in map again
                self._update_sha_map()
                update_map = False
                try:
                    ret[sha] = self._cache.idmap.lookup_git_sha(sha)
                except KeyError:
                    pass
        return ret

    def lookup_git_sha(self, sha, update_map=True):
        return self.lookup_git_shas([sha], update_map=update_map)[sha]

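    # The tuples returned above take the form
    # ("commit", (revid, tree_sha, verifiers)), ("tree", (fileid, revid)) or
    # ("blob", (fileid, revision)), as unpacked in __getitem__ below.
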
    def __getitem__(self, sha):
        if self._cache.content_cache is not None:
            try:
                return self._cache.content_cache[sha]
            except KeyError:
                pass
        (type, type_data) = self.lookup_git_sha(sha)
        # convert object to git object
        if type == "commit":
            (revid, tree_sha, verifiers) = type_data
            try:
                rev = self.repository.get_revision(revid)
            except errors.NoSuchRevision:
                trace.mutter('entry for %s %s in shamap: %r, but not found in '
                    'repository', type, sha, type_data)
                raise KeyError(sha)
            commit = self._reconstruct_commit(rev, tree_sha, roundtrip=True,
                verifiers=verifiers)
            _check_expected_sha(sha, commit)
            return commit
        elif type == "blob":
            (fileid, revision) = type_data
            return self._reconstruct_blobs([(fileid, revision, sha)]).next()
        elif type == "tree":
            (fileid, revid) = type_data
            try:
                rev = self.repository.get_revision(revid)
                tree = self.tree_cache.revision_tree(revid)
            except errors.NoSuchRevision:
                raise KeyError(sha)
            unusual_modes = extract_unusual_modes(rev)
            return self._reconstruct_tree(fileid, revid, tree.inventory,
                unusual_modes, expected_sha=sha)
        else:
            raise AssertionError("Unknown object type '%s'" % type)

    def generate_lossy_pack_contents(self, have, want, progress=None,
            get_tagged=None):
        return self.generate_pack_contents(have, want, progress, get_tagged,
            lossy=True)

    def generate_pack_contents(self, have, want, progress=None,
            get_tagged=None, lossy=False):
        """Iterate over the contents of a pack file.

        :param have: List of SHA1s of objects that should not be sent
        :param want: List of SHA1s of objects that should be sent
        """
        processed = set()
        ret = self.lookup_git_shas(have + want)
        for commit_sha in have:
            try:
                (type, (revid, tree_sha, verifiers)) = ret[commit_sha]
            except KeyError:
                pass
            else:
                assert type == "commit"
                processed.add(revid)
        pending = set()
        for commit_sha in want:
            if commit_sha in have:
                continue
            try:
                (type, (revid, tree_sha, verifiers)) = ret[commit_sha]
            except KeyError:
                pass
            else:
                assert type == "commit"
                pending.add(revid)
        todo = _find_missing_bzr_revids(self.repository.get_parent_map,
            pending, processed)
        trace.mutter('sending revisions %r', todo)
        pb = ui.ui_factory.nested_progress_bar()