bzr branch
http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
|
0.401.2
by Jelmer Vernooij
Move all InterRepository implementations into interrepo. |
1 |
# Copyright (C) 2009-2018 Jelmer Vernooij <jelmer@jelmer.uk>
|
2 |
#
|
|
3 |
# This program is free software; you can redistribute it and/or modify
|
|
4 |
# it under the terms of the GNU General Public License as published by
|
|
5 |
# the Free Software Foundation; either version 2 of the License, or
|
|
6 |
# (at your option) any later version.
|
|
7 |
#
|
|
8 |
# This program is distributed in the hope that it will be useful,
|
|
9 |
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
10 |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
11 |
# GNU General Public License for more details.
|
|
12 |
#
|
|
13 |
# You should have received a copy of the GNU General Public License
|
|
14 |
# along with this program; if not, write to the Free Software
|
|
15 |
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
|
16 |
||
17 |
"""InterRepository operations."""
|
|
18 |
||
19 |
from __future__ import absolute_import |
|
20 |
||
21 |
from io import BytesIO |
|
22 |
||
23 |
from dulwich.errors import ( |
|
24 |
NotCommitError, |
|
25 |
)
|
|
26 |
from dulwich.object_store import ( |
|
27 |
ObjectStoreGraphWalker, |
|
28 |
)
|
|
29 |
from dulwich.protocol import ( |
|
30 |
CAPABILITY_THIN_PACK, |
|
31 |
ZERO_SHA, |
|
32 |
)
|
|
33 |
from dulwich.walk import Walker |
|
34 |
||
35 |
from ...errors import ( |
|
36 |
FetchLimitUnsupported, |
|
37 |
InvalidRevisionId, |
|
38 |
LossyPushToSameVCS, |
|
39 |
NoRoundtrippingSupport, |
|
40 |
NoSuchRevision, |
|
41 |
)
|
|
42 |
from ...repository import ( |
|
43 |
InterRepository, |
|
44 |
)
|
|
45 |
from ...revision import ( |
|
46 |
NULL_REVISION, |
|
47 |
)
|
|
48 |
from ... import ( |
|
49 |
trace, |
|
50 |
ui, |
|
51 |
)
|
|
52 |
||
53 |
from .errors import ( |
|
54 |
NoPushSupport, |
|
55 |
)
|
|
56 |
from .fetch import ( |
|
57 |
import_git_objects, |
|
58 |
report_git_progress, |
|
59 |
DetermineWantsRecorder, |
|
60 |
)
|
|
61 |
from .mapping import ( |
|
62 |
needs_roundtripping, |
|
63 |
)
|
|
64 |
from .object_store import ( |
|
65 |
get_object_store, |
|
66 |
_tree_to_objects, |
|
67 |
)
|
|
68 |
from .push import ( |
|
69 |
MissingObjectsIterator, |
|
70 |
)
|
|
71 |
from .refs import ( |
|
72 |
is_tag, |
|
73 |
)
|
|
74 |
from .repository import ( |
|
75 |
GitRepository, |
|
76 |
LocalGitRepository, |
|
77 |
GitRepositoryFormat, |
|
78 |
)
|
|
79 |
from .remote import ( |
|
80 |
RemoteGitRepository, |
|
81 |
)
|
|
82 |
from .unpeel_map import ( |
|
83 |
UnpeelMap, |
|
84 |
)
|
|
85 |
||
86 |
||
87 |
class InterToGitRepository(InterRepository):
    """InterRepository that copies into a Git repository."""

    _matching_repo_format = GitRepositoryFormat()

    def __init__(self, source, target):
        super(InterToGitRepository, self).__init__(source, target)
        # Mapping between bzr and git revision identifiers, as chosen by
        # the target repository.
        self.mapping = self.target.get_mapping()
        # Git-object view of the (bzr) source repository.
        self.source_store = get_object_store(self.source, self.mapping)

    @staticmethod
    def _get_repo_format_to_test():
        return None

    def copy_content(self, revision_id=None, pb=None):
        """See InterRepository.copy_content."""
        self.fetch(revision_id, pb, find_ghosts=False)

    def fetch_refs(self, update_refs, lossy):
        """Fetch possibly roundtripped revisions into the target repository
        and update refs.

        :param update_refs: Generate refs to fetch. Receives dictionary
            with old refs (git shas), returns dictionary of new names to
            git shas.
        :param lossy: Whether to roundtrip
        :return: old refs, new refs
        """
        raise NotImplementedError(self.fetch_refs)

    def search_missing_revision_ids(self,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """Return revisions present in the source but not in the target.

        :raise FetchLimitUnsupported: if a limit is requested; this
            implementation cannot honour one.
        """
        if limit is not None:
            raise FetchLimitUnsupported(self)
        git_shas = []
        todo = []
        if revision_ids:
            todo.extend(revision_ids)
        if if_present_ids:
            # BUG FIX: previously extended with revision_ids again, so
            # if_present_ids were ignored (and this raised TypeError when
            # revision_ids was None).
            todo.extend(if_present_ids)
        with self.source_store.lock_read():
            # BUG FIX: walk the combined todo list, not just revision_ids.
            for revid in todo:
                if revid == NULL_REVISION:
                    continue
                git_sha = self.source_store._lookup_revision_sha1(revid)
                git_shas.append(git_sha)
            # Walk commits reachable from the requested heads, excluding
            # everything already referenced in the target.
            walker = Walker(self.source_store,
                include=git_shas, exclude=[
                    sha for sha in self.target.controldir.get_refs_container().as_dict().values()
                    if sha != ZERO_SHA])
            missing_revids = set()
            for entry in walker:
                for (kind, type_data) in self.source_store.lookup_git_sha(entry.commit.id):
                    if kind == "commit":
                        missing_revids.add(type_data[0])
            return self.source.revision_ids_to_search_result(missing_revids)

    def _warn_slow(self):
        # Cross-VCS pushes are significantly slower than native ones.
        trace.warning(
            'Pushing from a Bazaar to a Git repository. '
            'For better performance, push into a Bazaar repository.')
150 |
||
151 |
class InterToLocalGitRepository(InterToGitRepository):
    """InterBranch implementation between a Bazaar and a Git repository."""

    def __init__(self, source, target):
        super(InterToLocalGitRepository, self).__init__(source, target)
        # Direct handles on the local target's dulwich object store and refs.
        self.target_store = self.target.controldir._git.object_store
        self.target_refs = self.target.controldir._git.refs

    def _commit_needs_fetching(self, sha_id):
        """Return True if the commit `sha_id` is absent from the target."""
        try:
            return (sha_id not in self.target_store)
        except NoSuchRevision:
            # Ghost, can't push
            return False

    def _revision_needs_fetching(self, sha_id, revid):
        """Return True if bzr revision `revid` must be pushed.

        :param sha_id: Git SHA for the revision, or None to look it up.
        """
        if revid == NULL_REVISION:
            return False
        if sha_id is None:
            try:
                sha_id = self.source_store._lookup_revision_sha1(revid)
            except KeyError:
                # Unknown in the source; nothing to fetch.
                return False
        return self._commit_needs_fetching(sha_id)

    def missing_revisions(self, stop_revisions):
        """Find the revisions that are missing from the target repository.

        :param stop_revisions: Revisions to check for (tuples with
            Git SHA1, bzr revid)
        :return: sequence of missing revisions, in topological order
        :raise: NoSuchRevision if the stop_revisions are not present in
            the source
        """
        revid_sha_map = {}
        stop_revids = []
        for (sha1, revid) in stop_revisions:
            if sha1 is not None and revid is not None:
                revid_sha_map[revid] = sha1
                stop_revids.append(revid)
            elif sha1 is not None:
                # Only a git SHA given: map it back to bzr revision id(s).
                if self._commit_needs_fetching(sha1):
                    for (kind, (revid, tree_sha, verifiers)) in self.source_store.lookup_git_sha(sha1):
                        revid_sha_map[revid] = sha1
                        stop_revids.append(revid)
            else:
                if revid is None:
                    raise AssertionError
                stop_revids.append(revid)
        missing = set()
        graph = self.source.get_graph()
        pb = ui.ui_factory.nested_progress_bar()
        try:
            # Breadth-first walk towards the roots, stopping at revisions
            # the target already has.
            while stop_revids:
                new_stop_revids = []
                for revid in stop_revids:
                    sha1 = revid_sha_map.get(revid)
                    if (not revid in missing and
                        self._revision_needs_fetching(sha1, revid)):
                        missing.add(revid)
                        new_stop_revids.append(revid)
                stop_revids = set()
                parent_map = graph.get_parent_map(new_stop_revids)
                for parent_revids in parent_map.itervalues():
                    stop_revids.update(parent_revids)
                pb.update("determining revisions to fetch", len(missing))
        finally:
            pb.finished()
        return graph.iter_topo_order(missing)

    def _get_target_bzr_refs(self):
        """Return a dictionary with references.

        :return: Dictionary with reference names as keys and tuples
            with Git SHA, Bazaar revid as values.
        """
        bzr_refs = {}
        refs = {}
        for k in self.target._git.refs.allkeys():
            try:
                v = self.target._git.refs[k]
            except KeyError:
                # broken symref?
                continue
            try:
                # Resolve the git SHA back to a bzr revision id, if the
                # source actually has that revision.
                for (kind, type_data) in self.source_store.lookup_git_sha(v):
                    if kind == "commit" and self.source.has_revision(type_data[0]):
                        revid = type_data[0]
                        break
                else:
                    revid = None
            except KeyError:
                revid = None
            bzr_refs[k] = (v, revid)
        return bzr_refs

    def fetch_refs(self, update_refs, lossy):
        """See InterToGitRepository.fetch_refs."""
        self._warn_slow()
        with self.source_store.lock_read():
            old_refs = self._get_target_bzr_refs()
            new_refs = update_refs(old_refs)
            # Symbolic refs ('ref: ...') carry no objects of their own.
            revidmap = self.fetch_objects(
                [(git_sha, bzr_revid) for (git_sha, bzr_revid) in new_refs.values() if git_sha is None or not git_sha.startswith('ref:')], lossy=lossy)
            for name, (gitid, revid) in new_refs.iteritems():
                if gitid is None:
                    try:
                        gitid = revidmap[revid][0]
                    except KeyError:
                        gitid = self.source_store._lookup_revision_sha1(revid)
                # A valid ref value is either a 40-char hex SHA or a symref.
                if len(gitid) != 40 and not gitid.startswith('ref: '):
                    raise AssertionError("invalid ref contents: %r" % gitid)
                self.target_refs[name] = gitid
            return revidmap, old_refs, new_refs

    def fetch_objects(self, revs, lossy, limit=None):
        """Import git objects for the given (git sha, bzr revid) pairs.

        :param revs: iterable of (git_sha, bzr_revid) tuples
        :param lossy: whether to discard bzr-specific metadata
        :param limit: optional cap on the number of revisions imported
        :return: dict mapping old revid -> (git sha, new revid)
        """
        if not lossy and not self.mapping.roundtripping:
            # Roundtripping is impossible with this mapping; refuse early.
            for git_sha, bzr_revid in revs:
                if bzr_revid is not None and needs_roundtripping(self.source, bzr_revid):
                    raise NoPushSupport(self.source, self.target, self.mapping,
                                        bzr_revid)
        with self.source_store.lock_read():
            todo = list(self.missing_revisions(revs))[:limit]
            revidmap = {}
            pb = ui.ui_factory.nested_progress_bar()
            try:
                object_generator = MissingObjectsIterator(
                    self.source_store, self.source, pb)
                for (old_revid, git_sha) in object_generator.import_revisions(
                        todo, lossy=lossy):
                    if lossy:
                        new_revid = self.mapping.revision_id_foreign_to_bzr(git_sha)
                    else:
                        new_revid = old_revid
                        try:
                            self.mapping.revision_id_bzr_to_foreign(old_revid)
                        except InvalidRevisionId:
                            # Not representable natively; record a ref so
                            # the mapping can be recovered later.
                            refname = self.mapping.revid_as_refname(old_revid)
                            self.target_refs[refname] = git_sha
                    revidmap[old_revid] = (git_sha, new_revid)
                self.target_store.add_objects(object_generator)
                return revidmap
            finally:
                pb.finished()

    def fetch(self, revision_id=None, pb=None, find_ghosts=False,
              fetch_spec=None, mapped_refs=None):
        """See InterRepository.fetch."""
        if mapped_refs is not None:
            stop_revisions = mapped_refs
        elif revision_id is not None:
            stop_revisions = [(None, revision_id)]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                stop_revisions = [(None, revid) for revid in recipe[1]]
            else:
                raise AssertionError("Unsupported search result type %s" % recipe[0])
        else:
            # No limit given: push everything.
            stop_revisions = [(None, revid) for revid in self.source.all_revision_ids()]
        self._warn_slow()
        try:
            self.fetch_objects(stop_revisions, lossy=False)
        except NoPushSupport:
            raise NoRoundtrippingSupport(self.source, self.target)

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        return (not isinstance(source, GitRepository) and
                isinstance(target, LocalGitRepository))
|
320 |
||
321 |
||
322 |
class InterToRemoteGitRepository(InterToGitRepository):
    """InterRepository pushing from Bazaar into a remote Git repository."""

    def fetch_refs(self, update_refs, lossy):
        """Import the gist of the ancestry of a particular revision."""
        if not lossy and not self.mapping.roundtripping:
            # Remote pushes cannot store roundtripping metadata.
            raise NoPushSupport(self.source, self.target, self.mapping)
        unpeel_map = UnpeelMap.from_repository(self.source)
        revidmap = {}

        def determine_wants(old_refs):
            ret = {}
            # .items() rather than the Python-2-only .iteritems(); works on
            # both Python 2 and 3.
            self.old_refs = dict([(k, (v, None)) for (k, v) in old_refs.items()])
            self.new_refs = update_refs(self.old_refs)
            for name, (gitid, revid) in self.new_refs.items():
                if gitid is None:
                    git_sha = self.source_store._lookup_revision_sha1(revid)
                    # Re-apply tag peeling so the remote sees the tag
                    # object rather than the peeled commit.
                    ret[name] = unpeel_map.re_unpeel_tag(git_sha, old_refs.get(name))
                else:
                    ret[name] = gitid
            return ret
        self._warn_slow()
        with self.source_store.lock_read():
            new_refs = self.target.send_pack(determine_wants,
                self.source_store.generate_lossy_pack_data)
            # FIXME: revidmap?
            return revidmap, self.old_refs, self.new_refs

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        return (not isinstance(source, GitRepository) and
                isinstance(target, RemoteGitRepository))
|
353 |
||
354 |
||
355 |
class InterFromGitRepository(InterRepository):
    """Base InterRepository for fetching out of a Git repository."""

    _matching_repo_format = GitRepositoryFormat()

    def _target_has_shas(self, shas):
        """Return the subset of `shas` already present in the target."""
        raise NotImplementedError(self._target_has_shas)

    def get_determine_wants_heads(self, wants, include_tags=False):
        """Return a determine_wants callback for the given heads.

        :param wants: git SHAs that must be fetched
        :param include_tags: also fetch commits pointed at by tags
        """
        wants = set(wants)

        def determine_wants(refs):
            potential = set(wants)
            if include_tags:
                # .items() rather than py2-only .iteritems().
                for k, unpeeled in refs.items():
                    if k.endswith("^{}"):
                        # Peeled variant of an annotated tag; skip.
                        continue
                    if not is_tag(k):
                        continue
                    if unpeeled == ZERO_SHA:
                        continue
                    potential.add(unpeeled)
            return list(potential - self._target_has_shas(potential))
        return determine_wants

    def determine_wants_all(self, refs):
        raise NotImplementedError(self.determine_wants_all)

    @staticmethod
    def _get_repo_format_to_test():
        return None

    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content."""
        self.fetch(revision_id, find_ghosts=False)

    def search_missing_revision_ids(self,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """Return revisions present in the source but not in the target.

        :raise FetchLimitUnsupported: if a limit is requested.
        """
        if limit is not None:
            raise FetchLimitUnsupported(self)
        git_shas = []
        todo = []
        if revision_ids:
            todo.extend(revision_ids)
        if if_present_ids:
            # BUG FIX: previously extended with revision_ids again, so
            # if_present_ids were ignored (and this raised TypeError when
            # revision_ids was None).
            todo.extend(if_present_ids)
        with self.lock_read():
            # BUG FIX: walk the combined todo list, not just revision_ids.
            for revid in todo:
                if revid == NULL_REVISION:
                    continue
                git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
                git_shas.append(git_sha)
            # Walk commits reachable from the requested heads, excluding
            # everything already referenced in the target.
            walker = Walker(self.source._git.object_store,
                include=git_shas, exclude=[
                    sha for sha in self.target.controldir.get_refs_container().as_dict().values()
                    if sha != ZERO_SHA])
            missing_revids = set()
            for entry in walker:
                missing_revids.add(self.source.lookup_foreign_revision_id(entry.commit.id))
            return self.source.revision_ids_to_search_result(missing_revids)
|
|
0.401.2
by Jelmer Vernooij
Move all InterRepository implementations into interrepo. |
414 |
|
415 |
||
416 |
class InterGitNonGitRepository(InterFromGitRepository):
    """Base InterRepository that copies revisions from a Git into a non-Git
    repository."""

    def _target_has_shas(self, shas):
        """Return the subset of commit `shas` the target already has."""
        revids = {}
        for sha in shas:
            try:
                revid = self.source.lookup_foreign_revision_id(sha)
            except NotCommitError:
                # Commit is definitely not present
                continue
            else:
                revids[revid] = sha
        return set([revids[r] for r in self.target.has_revisions(revids)])

    def determine_wants_all(self, refs):
        """Want every ref's commit that the target lacks."""
        potential = set()
        # .items() rather than the Python-2-only .iteritems().
        for k, v in refs.items():
            # For non-git target repositories, only worry about peeled
            if v == ZERO_SHA:
                continue
            potential.add(self.source.controldir.get_peeled(k) or v)
        return list(potential - self._target_has_shas(potential))

    def get_determine_wants_heads(self, wants, include_tags=False):
        """See InterFromGitRepository.get_determine_wants_heads.

        Unlike the base class, tag refs are peeled before being added.
        """
        wants = set(wants)

        def determine_wants(refs):
            potential = set(wants)
            if include_tags:
                for k, unpeeled in refs.items():
                    if not is_tag(k):
                        continue
                    if unpeeled == ZERO_SHA:
                        continue
                    potential.add(self.source.controldir.get_peeled(k) or unpeeled)
            return list(potential - self._target_has_shas(potential))
        return determine_wants

    def _warn_slow(self):
        # Cross-VCS fetches are significantly slower than native ones.
        trace.warning(
            'Fetching from Git to Bazaar repository. '
            'For better performance, fetch into a Git repository.')

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """Fetch objects from a remote server.

        :param determine_wants: determine_wants callback
        :param mapping: BzrGitMapping to use
        :param limit: Maximum number of commits to import.
        :return: Tuple with pack hint, last imported revision id and remote refs
        """
        raise NotImplementedError(self.fetch_objects)

    def get_determine_wants_revids(self, revids, include_tags=False):
        """Build a determine_wants callback for specific bzr revision ids."""
        wants = set()
        for revid in set(revids):
            if self.target.has_revision(revid):
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            wants.add(git_sha)
        return self.get_determine_wants_heads(wants, include_tags=include_tags)

    def fetch(self, revision_id=None, find_ghosts=False,
              mapping=None, fetch_spec=None, include_tags=False):
        """See InterRepository.fetch."""
        if mapping is None:
            mapping = self.source.get_mapping()
        if revision_id is not None:
            interesting_heads = [revision_id]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                interesting_heads = recipe[1]
            else:
                raise AssertionError("Unsupported search result type %s" %
                                     recipe[0])
        else:
            interesting_heads = None

        if interesting_heads is not None:
            determine_wants = self.get_determine_wants_revids(
                interesting_heads, include_tags=include_tags)
        else:
            determine_wants = self.determine_wants_all

        (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants,
            mapping)
        if pack_hint is not None and self.target._format.pack_compresses:
            self.target.pack(hint=pack_hint)
        return remote_refs
|
506 |
||
507 |
||
508 |
class InterRemoteGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a remote Git into a non-Git
    repository."""

    def get_target_heads(self):
        """Return the head revision ids of the target repository."""
        # FIXME: This should be more efficient
        all_revs = self.target.all_revision_ids()
        parent_map = self.target.get_parent_map(all_revs)
        all_parents = set()
        # BUG FIX: this used map(all_parents.update, ...), which on
        # Python 3 builds a lazy iterator that is never consumed, so no
        # parents were ever collected. Use an explicit loop instead.
        for parent_revids in parent_map.values():
            all_parents.update(parent_revids)
        # Heads are revisions that are nobody's parent.
        return set(all_revs) - all_parents

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """See `InterGitNonGitRepository`."""
        self._warn_slow()
        store = get_object_store(self.target, mapping)
        with store.lock_write():
            heads = self.get_target_heads()
            # Tell the remote what we already have, so it can send a
            # minimal pack.
            graph_walker = ObjectStoreGraphWalker(
                [store._lookup_revision_sha1(head) for head in heads],
                lambda sha: store[sha].parents)
            wants_recorder = DetermineWantsRecorder(determine_wants)

            pb = ui.ui_factory.nested_progress_bar()
            try:
                objects_iter = self.source.fetch_objects(
                    wants_recorder, graph_walker, store.get_raw,
                    progress=lambda text: report_git_progress(pb, text),)
                trace.mutter("Importing %d new revisions",
                             len(wants_recorder.wants))
                (pack_hint, last_rev) = import_git_objects(self.target,
                    mapping, objects_iter, store, wants_recorder.wants, pb,
                    limit)
                return (pack_hint, last_rev, wants_recorder.remote_refs)
            finally:
                pb.finished()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, RemoteGitRepository):
            return False
        if not target.supports_rich_root():
            return False
        if isinstance(target, GitRepository):
            return False
        if not getattr(target._format, "supports_full_versioned_files", True):
            return False
        return True
|
557 |
||
558 |
||
559 |
class InterLocalGitNonGitRepository(InterGitNonGitRepository):
    """InterRepository that copies revisions from a local Git into a non-Git
    repository."""

    def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
        """See `InterGitNonGitRepository`."""
        self._warn_slow()
        remote_refs = self.source.controldir.get_refs_container().as_dict()
        wants = determine_wants(remote_refs)
        # Removed unused local `create_pb` that was assigned but never read.
        pb = ui.ui_factory.nested_progress_bar()
        target_git_object_retriever = get_object_store(self.target, mapping)
        try:
            target_git_object_retriever.lock_write()
            try:
                (pack_hint, last_rev) = import_git_objects(self.target,
                    mapping, self.source._git.object_store,
                    target_git_object_retriever, wants, pb, limit)
                return (pack_hint, last_rev, remote_refs)
            finally:
                target_git_object_retriever.unlock()
        finally:
            pb.finished()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, LocalGitRepository):
            return False
        if not target.supports_rich_root():
            return False
        if isinstance(target, GitRepository):
            return False
        if not getattr(target._format, "supports_full_versioned_files", True):
            return False
        return True
|
595 |
||
596 |
||
597 |
class InterGitGitRepository(InterFromGitRepository):
    """InterRepository that copies between Git repositories."""

    def fetch_refs(self, update_refs, lossy):
        """Fetch revisions and update refs in the target.

        :param update_refs: callback receiving {name: (sha, revid)} for the
            old refs and returning the desired new refs in the same shape
        :param lossy: must be False; git-to-git transfers are lossless
        :return: (revidmap, old refs container, new refs container)
        """
        if lossy:
            raise LossyPushToSameVCS(self.source, self.target)
        old_refs = self.target.controldir.get_refs_container()
        ref_changes = {}

        def determine_wants(heads):
            # .items()/.values() rather than the Python-2-only
            # .iteritems()/.itervalues().
            old_refs = dict([(k, (v, None)) for (k, v) in heads.as_dict().items()])
            new_refs = update_refs(old_refs)
            ref_changes.update(new_refs)
            return [sha1 for (sha1, bzr_revid) in new_refs.values()]
        self.fetch_objects(determine_wants, lossy=lossy)
        for k, (git_sha, bzr_revid) in ref_changes.items():
            self.target._git.refs[k] = git_sha
        new_refs = self.target.controldir.get_refs_container()
        return None, old_refs, new_refs

    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
        raise NotImplementedError(self.fetch_objects)

    def _target_has_shas(self, shas):
        """Return the subset of `shas` present in the target object store."""
        return set([sha for sha in shas if sha in self.target._git.object_store])

    def fetch(self, revision_id=None, find_ghosts=False,
              mapping=None, fetch_spec=None, branches=None, limit=None, include_tags=False):
        """See InterRepository.fetch."""
        if mapping is None:
            mapping = self.source.get_mapping()
        if revision_id is not None:
            args = [revision_id]
        elif fetch_spec is not None:
            recipe = fetch_spec.get_recipe()
            if recipe[0] in ("search", "proxy-search"):
                heads = recipe[1]
            else:
                raise AssertionError(
                    "Unsupported search result type %s" % recipe[0])
            args = heads
        if branches is not None:
            def determine_wants(refs):
                ret = []
                for name, value in refs.items():
                    if value == ZERO_SHA:
                        continue

                    if name in branches or (include_tags and is_tag(name)):
                        ret.append(value)
                return ret
        elif fetch_spec is None and revision_id is None:
            determine_wants = self.determine_wants_all
        else:
            determine_wants = self.get_determine_wants_revids(args, include_tags=include_tags)
        wants_recorder = DetermineWantsRecorder(determine_wants)
        self.fetch_objects(wants_recorder, mapping, limit=limit)
        return wants_recorder.remote_refs

    def get_determine_wants_revids(self, revids, include_tags=False):
        """Build a determine_wants callback for specific bzr revision ids."""
        wants = set()
        for revid in set(revids):
            if revid == NULL_REVISION:
                continue
            git_sha, mapping = self.source.lookup_bzr_revision_id(revid)
            wants.add(git_sha)
        return self.get_determine_wants_heads(wants, include_tags=include_tags)

    def determine_wants_all(self, refs):
        """Want every non-null ref value the target lacks."""
        potential = set([v for v in refs.values() if v != ZERO_SHA])
        return list(potential - self._target_has_shas(potential))
|
666 |
||
667 |
||
668 |
class InterLocalGitLocalGitRepository(InterGitGitRepository):
    """Copy objects between two local Git repositories via dulwich."""

    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
        """Fetch objects directly from the source into the target git.

        :return: (None, None, refs) — no pack hint or last revision is
            produced for git-to-git transfers.
        """
        # Guard clauses: lossy and limited fetches are not supported here.
        if lossy:
            raise LossyPushToSameVCS(self.source, self.target)
        if limit is not None:
            raise FetchLimitUnsupported(self)
        progress_bar = ui.ui_factory.nested_progress_bar()
        try:
            def show_progress(text):
                report_git_progress(progress_bar, text)
            fetched_refs = self.source._git.fetch(
                self.target._git, determine_wants, show_progress)
        finally:
            progress_bar.finished()
        return (None, None, fetched_refs)

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        if not isinstance(source, LocalGitRepository):
            return False
        return isinstance(target, LocalGitRepository)
|
688 |
||
689 |
||
690 |
class InterRemoteGitLocalGitRepository(InterGitGitRepository):
    """Fetch from a remote Git repository into a local Git repository."""

    def fetch_objects(self, determine_wants, mapping=None, limit=None, lossy=False):
        """Fetch a pack from the remote and install it locally.

        :return: (None, None, refs) — no pack hint or last revision is
            produced for git-to-git transfers.
        """
        if lossy:
            raise LossyPushToSameVCS(self.source, self.target)
        if limit is not None:
            raise FetchLimitUnsupported(self)
        graphwalker = self.target._git.get_graph_walker()
        pb = ui.ui_factory.nested_progress_bar()
        try:
            if CAPABILITY_THIN_PACK in self.source.controldir._client._fetch_capabilities:
                # TODO(jelmer): Avoid reading entire file into memory and
                # only processing it after the whole file has been fetched.
                f = BytesIO()

                def commit():
                    # Only install the pack if anything was received;
                    # a thin pack must be completed against local objects.
                    if f.tell():
                        f.seek(0)
                        self.target._git.object_store.move_in_thin_pack(f)

                def abort():
                    # In-memory buffer: nothing to clean up on failure.
                    pass
            else:
                # Non-thin pack: stream straight into the object store,
                # which supplies its own commit/abort handlers.
                f, commit, abort = self.target._git.object_store.add_pack()
            try:
                refs = self.source.controldir.fetch_pack(
                    determine_wants, graphwalker, f.write,
                    lambda text: report_git_progress(pb, text))
                commit()
                return (None, None, refs)
            except BaseException:
                abort()
                raise
        finally:
            pb.finished()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with GitRepository."""
        return (isinstance(source, RemoteGitRepository) and
                isinstance(target, LocalGitRepository))