    count_copied -- number of revisions copied

    This should not be used directly; it's essentially an object to
    encapsulate the logic in InterRepository.fetch().
    """
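
    # Illustrative usage sketch (not part of the original module): callers are
    # expected to go through Repository.fetch(), which hands off to
    # InterRepository.fetch() and ultimately to a RepoFetcher subclass.  The
    # exact keyword arguments below are assumptions for illustration only.
    #
    #   to_repo.fetch(from_repo, revision_id=last_rev)
    #   # ...which is roughly equivalent to:
    #   InterRepository.get(from_repo, to_repo).fetch(revision_id=last_rev)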

    def __init__(self, to_repository, from_repository, last_revision=None,
                 pb=None, find_ghosts=True):
        """Create a repo fetcher.

        :param find_ghosts: If True search the entire history for ghosts.
        """
        # result variables.
        self.failed_revisions = []
        self.count_copied = 0
        if to_repository.has_same_location(from_repository):
            # repository.fetch should be taking care of this case.
            raise errors.BzrError('RepoFetcher run '
                    'between two objects at the same location: '
                    '%r and %r' % (to_repository, from_repository))
        self.to_repository = to_repository
        self.from_repository = from_repository
        # must not mutate self._last_revision as it's potentially a shared instance
        self._last_revision = last_revision
        self.find_ghosts = find_ghosts
        if pb is None:
            self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
            self.nested_pb = self.pb
        else:
            self.pb = pb
            self.nested_pb = None

    def __fetch(self):
        """Primary worker function.

        This initialises all the needed variables, and then fetches the
        requested revisions, finally clearing the progress bar.
        """
        self.to_weaves = self.to_repository.weave_store
        self.to_control = self.to_repository.control_weaves
        self.from_weaves = self.from_repository.weave_store
        self.from_control = self.from_repository.control_weaves
        self.count_total = 0
        self.file_ids_names = {}
        pp = ProgressPhase('Transferring', 4, self.pb)
        search = self._revids_to_fetch()
        if search is None:
            return
        if getattr(self, '_fetch_everything_for_search', None) is not None:
            self._fetch_everything_for_search(search, pp)
        else:
            # backward compatibility
            self._fetch_everything_for_revisions(search.get_keys, pp)
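        # (Illustrative note, not in the original) the ProgressPhase above is
        # created with four phases because the search-based path walks through
        # four kinds of data in order: file texts, inventories, signatures and
        # revisions, as the knit_kind branches below show.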

    def _fetch_everything_for_search(self, search, pp):
        """Fetch all data for the given set of revisions."""
        # The first phase is "file".  We pass the progress bar for it directly
        # into item_keys_introduced_by, which has more information about how
        # that phase is progressing than we do.  Progress updates for the other
        # phases are taken care of in this function.
        # XXX: there should be a clear owner of the progress reporting.  Perhaps
        # item_keys_introduced_by should have a richer API than it does at the
        # moment, so that it can feed the progress information back to this
        # function?
        phase = 'file'
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        revs = search.get_keys()
        graph = self.from_repository.get_graph()
        revs = list(graph.iter_topo_order(revs))
        data_to_fetch = self.from_repository.item_keys_introduced_by(revs, pb)
        for knit_kind, file_id, revisions in data_to_fetch:
            if knit_kind != phase:
                phase = knit_kind
                # Make a new progress bar for this phase
                pb.finished()
                pp.next_phase()
                pb = bzrlib.ui.ui_factory.nested_progress_bar()
            if knit_kind == "file":
                self._fetch_weave_text(file_id, revisions)
            elif knit_kind == "inventory":
                # Before we process the inventory we generate the root
                # texts (if necessary) so that the inventories reference
                # them.
                self._generate_root_texts(revs)
                # NB: This currently reopens the inventory weave in source;
                # using a full get_data_stream instead would avoid this.
                self._fetch_inventory_weave(revs, pb)
            elif knit_kind == "signatures":
                # Nothing to do here; this will be taken care of when
                # _fetch_revision_texts happens.
                pass
            elif knit_kind == "revisions":
                self._fetch_revision_texts(revs, pb)
            else:
                raise AssertionError("Unknown knit kind %r" % knit_kind)
        pb.finished()
        self.count_copied += len(revs)
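        # (Illustrative, not in the original) item_keys_introduced_by() is
        # assumed to yield (knit_kind, file_id, versions) tuples grouped by
        # kind, roughly:
        #   ('file', 'some-file-id', ['rev-1', 'rev-2'])
        #   ('inventory', None, ['rev-1', 'rev-2'])
        #   ('signatures', None, [...])
        #   ('revisions', None, [...])
        # which is why the loop above starts a new progress phase whenever
        # knit_kind changes.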

    def _revids_to_fetch(self):
        """Determines the exact revisions needed from self.from_repository to
        install self._last_revision in self.to_repository.

        If no revisions need to be fetched, then this just returns None.
        """
        mutter('fetch up to rev {%s}', self._last_revision)
        if self._last_revision is NULL_REVISION:
            # explicit limit of no revisions needed
            return None
        if (self._last_revision is not None and
            self.to_repository.has_revision(self._last_revision)):
            return None
        try:
            return self.to_repository.search_missing_revision_ids(
                self.from_repository, self._last_revision,
                find_ghosts=self.find_ghosts)
        except errors.NoSuchRevision:
            raise InstallFailed([self._last_revision])
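
    # (Illustrative, not in the original) _revids_to_fetch() returns None when
    # there is nothing to do, or a search result whose get_keys() lists the
    # revision ids missing from the target; for example, if _last_revision is
    # 'rev-10' and the target already holds some of its ancestors, only the
    # absent ancestors are listed.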

    def _fetch_weave_texts(self, revs):
        texts_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            file_ids = self.from_repository.fileid_involved_by_set(revs)
            count = 0
            num_file_ids = len(file_ids)
            for file_id in file_ids:
                texts_pb.update("fetch texts", count, num_file_ids)
                count += 1
                try:
                    to_weave = self.to_weaves.get_weave(file_id,
                        self.to_repository.get_transaction())
                except errors.NoSuchFile:
                    # destination is empty, just copy it.
                    # this copies all the texts, which is useful and
                    # on per-file basis quite cheap.
                    # (leading copy_multi arguments below are reconstructed and
                    # assumed, not taken from this excerpt)
                    self.to_weaves.copy_multi(self.from_weaves, [file_id], None,
                        self.from_repository.get_transaction(),
                        self.to_repository.get_transaction())
                else:
                    # destination has contents, must merge
                    from_weave = self.from_weaves.get_weave(file_id,
                        self.from_repository.get_transaction())
                    # we fetch all the texts, because texts do
                    # not reference anything, and it's cheap enough
                    to_weave.join(from_weave)
        finally:
            texts_pb.finished()

    def _fetch_inventory_weave(self, revs):
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            pb.update("fetch inventory", 0, 2)
            to_weave = self.to_control.get_weave('inventory',
                    self.to_repository.get_transaction())
            child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
            try:
                # just merge; this is optimisable, and it means we don't
                # copy unreferenced data such as not-needed inventories.
                pb.update("fetch inventory", 1, 3)
                from_weave = self.from_repository.get_inventory_weave()
                pb.update("fetch inventory", 2, 3)
                # we fetch only the referenced inventories because we do not
                # know for unselected inventories whether all their required
                # texts are present in the other repository - it could be
                # corrupt.
                to_weave.join(from_weave, pb=child_pb, msg='merge inventory',
                              version_ids=revs)
            finally:
                child_pb.finished()
        finally:
            pb.finished()

    def _fetch_weave_text(self, file_id, required_versions):
        to_weave = self.to_weaves.get_weave_or_empty(file_id,
            self.to_repository.get_transaction())
        from_weave = self.from_weaves.get_weave(file_id,
            self.from_repository.get_transaction())
        # Fetch all the texts.
        to_weave.insert_record_stream(from_weave.get_record_stream(
            required_versions, 'topological', False))

    def _fetch_inventory_weave(self, revs, pb):
        pb.update("fetch inventory", 0, 2)
        to_weave = self.to_repository.get_inventory_weave()
        child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            # just merge; this is optimisable, and it means we don't
            # copy unreferenced data such as not-needed inventories.
            pb.update("fetch inventory", 1, 3)
            from_weave = self.from_repository.get_inventory_weave()
            pb.update("fetch inventory", 2, 3)
            # we fetch only the referenced inventories because we do not
            # know for unselected inventories whether all their required
            # texts are present in the other repository - it could be
            # corrupt.
            to_weave.insert_record_stream(from_weave.get_record_stream(revs,
                'topological', False))
        finally:
            child_pb.finished()

    def _generate_root_texts(self, revs):
        """This will be called by __fetch between fetching weave texts and
        fetching the inventory weave.

        Subclasses should override this if they need to generate root texts
        after fetching weave texts.
        """
        pass


class GenericRepoFetcher(RepoFetcher):
    """Generic repository-to-repository fetcher.

    It triggers a reconciliation after fetching to ensure integrity.
    """

    def _fetch_revision_texts(self, revs, pb):
        """Fetch revision object texts"""
        to_txn = self.to_transaction = self.to_repository.get_transaction()
        count = 0
        total = len(revs)
        to_store = self.to_repository._revision_store
        for rev in revs:
            pb.update('copying revisions', count, total)
            try:
                sig_text = self.from_repository.get_signature_text(rev)
                to_store.add_revision_signature_text(rev, sig_text, to_txn)
            except errors.NoSuchRevision:
                # not signed.
                pass
            self._copy_revision(rev, to_txn)
            count += 1
        # fixup inventory if needed:
        # this is expensive because we have no inverse index to current ghosts.
        # but on local disk it's a few seconds and sftp push is already insane.
        # FIXME: repository should inform if this is needed.
        self.to_repository.reconcile()

    def _copy_revision(self, rev, to_txn):
        to_store = self.to_repository._revision_store
        to_store.add_revision(self.from_repository.get_revision(rev), to_txn)
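
    # (Sketch note, not in the original) _copy_revision() is the override
    # point for subclasses: Model1toKnit2Fetcher below swaps it for a call to
    # Inter1and2Helper.fetch_revisions() so the revision is re-added (and
    # re-serialised) through the target repository's own API.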


class KnitRepoFetcher(RepoFetcher):
    """This is a knit format repository specific fetcher."""

    def _fetch_revision_texts(self, revs, pb):
        from_transaction = self.from_repository.get_transaction()
        to_transaction = self.to_repository.get_transaction()
        to_sf = self.to_repository._revision_store.get_signature_file(
            to_transaction)
        from_sf = self.from_repository._revision_store.get_signature_file(
            from_transaction)
        # A missing signature is just skipped.
        to_sf.insert_record_stream(filter_absent(from_sf.get_record_stream(revs,
            'unordered', False)))
        self._fetch_just_revision_texts(revs, from_transaction, to_transaction)

    def _fetch_just_revision_texts(self, version_ids, from_transaction,
                                   to_transaction):
        to_rf = self.to_repository._revision_store.get_revision_file(
            to_transaction)
        from_rf = self.from_repository._revision_store.get_revision_file(
            from_transaction)
        to_rf.insert_record_stream(from_rf.get_record_stream(version_ids,
            'topological', False))


class Fetcher(object):
    """Backwards compatibility glue for branch.fetch()."""

    @deprecated_method(zero_eight)
    def __init__(self, to_branch, from_branch, last_revision=None, pb=None):
        """Please see branch.fetch()."""
        to_branch.fetch(from_branch, last_revision, pb)


class Inter1and2Helper(object):
    """Helper for operations that convert data from model 1 and 2

    This is for use by fetchers and converters.
    """

    def __init__(self, source, target):
        """Constructor.

        :param source: The repository data comes from
        :param target: The repository data goes to
        """
        self.source = source
        self.target = target

    def iter_rev_trees(self, revs):
        """Iterate through RevisionTrees efficiently.

        Additionally, the inventory's revision_id is set if unset.

        Trees are retrieved in batches of 100, and then yielded in the order they were requested.

        :param revs: A list of revision ids
        """
        # In case that revs is not a list.
        revs = list(revs)
        while revs:
            for tree in self.source.revision_trees(revs[:100]):
                if tree.inventory.revision_id is None:
                    tree.inventory.revision_id = tree.get_revision_id()
                yield tree
            revs = revs[100:]
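
    # (Illustrative, not in the original) typical use by a fetcher or
    # converter; the names here are made up for the example:
    #   helper = Inter1and2Helper(source_repo, target_repo)
    #   for tree in helper.iter_rev_trees(revision_ids):
    #       inv = tree.inventory   # inv.revision_id is guaranteed to be set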

    def _find_root_ids(self, revs, parent_map, graph):
        revision_root = {}
        planned_versions = {}
        for tree in self.iter_rev_trees(revs):
            revision_id = tree.inventory.root.revision
            root_id = tree.get_root_id()
            planned_versions.setdefault(root_id, []).append(revision_id)
            revision_root[revision_id] = root_id
        # Find out which parents we don't already know root ids for
        parents = set()
        for revision_parents in parent_map.itervalues():
            parents.update(revision_parents)
        parents.difference_update(revision_root.keys() + [NULL_REVISION])
        # Limit to revisions present in the versionedfile
        parents = graph.get_parent_map(parents).keys()
        for tree in self.iter_rev_trees(parents):
            root_id = tree.get_root_id()
            revision_root[tree.get_revision_id()] = root_id
        return revision_root, planned_versions
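
    # (Illustrative, not in the original) the two returned mappings have
    # roughly this shape, using made-up ids:
    #   revision_root    == {'rev-1': 'TREE_ROOT', 'rev-2': 'TREE_ROOT'}
    #   planned_versions == {'TREE_ROOT': ['rev-1', 'rev-2']}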

    def generate_root_texts(self, revs):
        """Generate VersionedFiles for all root ids.

        :param revs: the revisions to include
        """
        to_store = self.target.weave_store
        graph = self.source.get_graph()
        parent_map = graph.get_parent_map(revs)
        revision_root, planned_versions = self._find_root_ids(
            revs, parent_map, graph)
        for root_id, versions in planned_versions.iteritems():
            versionedfile = to_store.get_weave_or_empty(root_id,
                self.target.get_transaction())
            parent_texts = {}
            for revision_id in versions:
                if revision_id in versionedfile:
                    continue
                parents = parent_map[revision_id]
                # We drop revision parents with different file-ids, because
                # a version cannot have a version with another file-id as its
                # parent.
                # When a parent revision is a ghost, we guess that its root id
                # was unchanged.
                parents = tuple(p for p in parents if p != NULL_REVISION
                    and revision_root.get(p, root_id) == root_id)
                result = versionedfile.add_lines_with_ghosts(
                    revision_id, parents, [], parent_texts)
                parent_texts[revision_id] = result[2]
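                # (Assumption, not stated in this excerpt) result[2] from
                # add_lines_with_ghosts() is taken to be the opaque
                # representation of the stored text; caching it in parent_texts
                # lets later add_lines_with_ghosts() calls delta against it
                # without re-reading the versioned file.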

    def regenerate_inventory(self, revs):
        """Generate a new inventory versionedfile in target, converting data.

        The inventory is retrieved from the source, (deserializing it), and
        stored in the target (reserializing it in a different format).

        :param revs: The revisions to include
        """
        for tree in self.iter_rev_trees(revs):
            parents = tree.get_parent_ids()
            self.target.add_inventory(tree.get_revision_id(), tree.inventory,
                                      parents)

    def fetch_revisions(self, revision_ids):
        for revision in self.source.get_revisions(revision_ids):
            self.target.add_revision(revision.revision_id, revision)


class Model1toKnit2Fetcher(GenericRepoFetcher):
    """Fetch from a Model1 repository into a Knit2 repository."""

    def __init__(self, to_repository, from_repository, last_revision=None,
                 pb=None, find_ghosts=True):
        self.helper = Inter1and2Helper(from_repository, to_repository)
        GenericRepoFetcher.__init__(self, to_repository, from_repository,
                                    last_revision, pb, find_ghosts)

    def _generate_root_texts(self, revs):
        self.helper.generate_root_texts(revs)

    def _fetch_inventory_weave(self, revs, pb):
        self.helper.regenerate_inventory(revs)

    def _copy_revision(self, rev, to_txn):
        self.helper.fetch_revisions([rev])


class Knit1to2Fetcher(KnitRepoFetcher):
    """Fetch from a Knit1 repository into a Knit2 repository."""

    def __init__(self, to_repository, from_repository, last_revision=None,
                 pb=None, find_ghosts=True):
        self.helper = Inter1and2Helper(from_repository, to_repository)
        KnitRepoFetcher.__init__(self, to_repository, from_repository,
                                 last_revision, pb, find_ghosts)

    def _generate_root_texts(self, revs):
        self.helper.generate_root_texts(revs)

    def _fetch_inventory_weave(self, revs, pb):
        self.helper.regenerate_inventory(revs)

    def _fetch_just_revision_texts(self, version_ids, from_transaction,
                                   to_transaction):
        self.helper.fetch_revisions(version_ids)


class RemoteToOtherFetcher(GenericRepoFetcher):

    def _fetch_everything_for_search(self, search, pp):
        data_stream = self.from_repository.get_data_stream_for_search(search)
        self.to_repository.insert_data_stream(data_stream)