/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/fetch.py

  • Committer: Jelmer Vernooij
  • Date: 2018-06-14 17:59:16 UTC
  • mto: This revision was merged to the branch mainline in revision 7065.
  • Revision ID: jelmer@jelmer.uk-20180614175916-a2e2xh5k533guq1x
Move breezy.plugins.git to breezy.git.

@@ -22,9 +22,11 @@
 branch.
 """
 
+from __future__ import absolute_import
+
 import operator
 
-from ..lazy_import import lazy_import
+from .lazy_import import lazy_import
 lazy_import(globals(), """
 from breezy import (
     tsort,
@@ -34,13 +36,16 @@
     vf_search,
     )
 """)
-from .. import (
+from . import (
     errors,
     ui,
     )
-from ..i18n import gettext
-from ..revision import NULL_REVISION
-from ..trace import mutter
+from .i18n import gettext
+from .revision import NULL_REVISION
+from .sixish import (
+    viewvalues,
+    )
+from .trace import mutter
 
 
 class RepoFetcher(object):
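
The import hunks above differ mainly in relative-import depth (`from ..` versus `from .`) and in the extra `__future__` and `breezy.sixish` imports on the added side; the `lazy_import(globals(), ...)` call itself is common to both. As a rough, self-contained sketch of the idea behind such deferred imports (not breezy's implementation), a module can be stood in for by a proxy that only performs the real import on first attribute access:

    import importlib

    class LazyModule(object):
        """Toy stand-in for the lazy_import idea: delay the real import
        until an attribute of the module is first needed."""

        def __init__(self, name):
            self._name = name
            self._module = None

        def __getattr__(self, attr):
            if self._module is None:
                self._module = importlib.import_module(self._name)
            return getattr(self._module, attr)

    # Nothing is imported until json.dumps is first touched.
    json = LazyModule('json')
    print(json.dumps({'deferred': True}))
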
@@ -51,7 +56,7 @@
     """
 
     def __init__(self, to_repository, from_repository, last_revision=None,
-                 find_ghosts=True, fetch_spec=None):
+        find_ghosts=True, fetch_spec=None):
         """Create a repo fetcher.
 
         :param last_revision: If set, try to limit to the data this revision
@@ -69,11 +74,14 @@
         self._last_revision = last_revision
         self._fetch_spec = fetch_spec
         self.find_ghosts = find_ghosts
-        with self.from_repository.lock_read():
-            mutter("Using fetch logic to copy between %s(%s) and %s(%s)",
-                   str(self.from_repository), str(self.from_repository._format),
-                   str(self.to_repository), str(self.to_repository._format))
+        self.from_repository.lock_read()
+        mutter("Using fetch logic to copy between %s(%s) and %s(%s)",
+               str(self.from_repository), str(self.from_repository._format),
+               str(self.to_repository), str(self.to_repository._format))
+        try:
             self.__fetch()
+        finally:
+            self.from_repository.unlock()
 
     def __fetch(self):
         """Primary worker function.
@@ -110,7 +118,7 @@
         # moment, so that it can feed the progress information back to this
         # function?
         if (self.from_repository._format.rich_root_data and
-                not self.to_repository._format.rich_root_data):
+            not self.to_repository._format.rich_root_data):
             raise errors.IncompatibleRepositories(
                 self.from_repository, self.to_repository,
                 "different rich-root support")
@@ -156,13 +164,12 @@
             return vf_search.EmptySearchResult()
         elif self._last_revision is not None:
             return vf_search.NotInOtherForRevs(self.to_repository,
-                                               self.from_repository, [
-                                                   self._last_revision],
-                                               find_ghosts=self.find_ghosts).execute()
-        else:  # self._last_revision is None:
+                self.from_repository, [self._last_revision],
+                find_ghosts=self.find_ghosts).execute()
+        else: # self._last_revision is None:
             return vf_search.EverythingNotInOther(self.to_repository,
-                                                  self.from_repository,
-                                                  find_ghosts=self.find_ghosts).execute()
+                self.from_repository,
+                find_ghosts=self.find_ghosts).execute()
 
 
 class Inter1and2Helper(object):
@@ -203,17 +210,17 @@
     def _find_root_ids(self, revs, parent_map, graph):
         revision_root = {}
         for tree in self.iter_rev_trees(revs):
-            root_id = tree.path2id('')
-            revision_id = tree.get_file_revision(u'')
+            root_id = tree.get_root_id()
+            revision_id = tree.get_file_revision(u'', root_id)
             revision_root[revision_id] = root_id
         # Find out which parents we don't already know root ids for
-        parents = set(parent_map.values())
+        parents = set(viewvalues(parent_map))
         parents.difference_update(revision_root)
         parents.discard(NULL_REVISION)
         # Limit to revisions present in the versionedfile
         parents = graph.get_parent_map(parents)
         for tree in self.iter_rev_trees(parents):
-            root_id = tree.path2id('')
+            root_id = tree.get_root_id()
             revision_root[tree.get_revision_id()] = root_id
         return revision_root
 
@@ -227,7 +234,7 @@
         rev_order = tsort.topo_sort(parent_map)
         rev_id_to_root_id = self._find_root_ids(revs, parent_map, graph)
         root_id_order = [(rev_id_to_root_id[rev_id], rev_id) for rev_id in
-                         rev_order]
+            rev_order]
         # Guaranteed stable, this groups all the file id operations together
         # retaining topological order within the revisions of a file id.
         # File id splits and joins would invalidate this, but they don't exist
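
In the `_find_root_ids` hunk above, the root file id is obtained either by looking up the empty path (`tree.path2id('')`) or via `tree.get_root_id()`, and the parent values are gathered either with `parent_map.values()` or with `viewvalues()` from `breezy.sixish`. Assuming `viewvalues` mirrors the `six` helper of the same name (a dict view on Python 2, plain `values()` on Python 3), both spellings build the same set; a toy sketch:

    import sys

    # Assumed behaviour of a six-style viewvalues() helper; not breezy's code.
    if sys.version_info[0] >= 3:
        def viewvalues(d):
            return d.values()
    else:
        def viewvalues(d):
            return d.viewvalues()

    parent_map = {'rev-2': 'rev-1', 'rev-3': 'rev-2'}
    print(set(viewvalues(parent_map)) == set(parent_map.values()))   # True
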
@@ -242,11 +249,11 @@
 
 
 def _new_root_data_stream(
-        root_keys_to_create, rev_id_to_root_id_map, parent_map, repo, graph=None):
+    root_keys_to_create, rev_id_to_root_id_map, parent_map, repo, graph=None):
     """Generate a texts substream of synthesised root entries.
 
     Used in fetches that do rich-root upgrades.
-
+    
     :param root_keys_to_create: iterable of (root_id, rev_id) pairs describing
         the root entries to create.
     :param rev_id_to_root_id_map: dict of known rev_id -> root_id mappings for
@@ -260,14 +267,14 @@
         root_id, rev_id = root_key
         parent_keys = _parent_keys_for_root_version(
             root_id, rev_id, rev_id_to_root_id_map, parent_map, repo, graph)
-        yield versionedfile.ChunkedContentFactory(
-            root_key, parent_keys, None, [])
+        yield versionedfile.FulltextContentFactory(
+            root_key, parent_keys, None, '')
 
 
 def _parent_keys_for_root_version(
-        root_id, rev_id, rev_id_to_root_id_map, parent_map, repo, graph=None):
+    root_id, rev_id, rev_id_to_root_id_map, parent_map, repo, graph=None):
     """Get the parent keys for a given root id.
-
+    
     A helper function for _new_root_data_stream.
     """
     # Include direct parents of the revision, but only if they used the same
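
The generator hunk above yields the synthesised (empty) root text either as a `ChunkedContentFactory` holding an empty list of chunks or as a `FulltextContentFactory` holding an empty string; in both cases the record carries the key, its parent keys, no sha1, and empty content. A sketch of that record shape, with plain named tuples standing in for breezy's `versionedfile` factories:

    from collections import namedtuple

    # Hypothetical stand-ins; only the argument shape is illustrated.
    ChunkedRecord = namedtuple('ChunkedRecord', 'key parents sha1 chunks')
    FulltextRecord = namedtuple('FulltextRecord', 'key parents sha1 text')

    root_key = ('root-id', 'rev-1')
    parent_keys = [('root-id', 'rev-0')]

    chunked = ChunkedRecord(root_key, parent_keys, None, [])    # empty text as chunk list
    fulltext = FulltextRecord(root_key, parent_keys, None, '')  # empty text as one string

    print(''.join(chunked.chunks) == fulltext.text)             # both are empty content
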
@@ -288,7 +295,7 @@
                 # But set parent_root_id to None since we don't really know
                 parent_root_id = None
             else:
-                parent_root_id = tree.path2id('')
+                parent_root_id = tree.get_root_id()
             rev_id_to_root_id_map[parent_id] = None
             # XXX: why not:
             #   rev_id_to_root_id_map[parent_id] = parent_root_id
@@ -310,9 +317,7 @@
                 pass
             else:
                 try:
-                    parent_ids.append(
-                        tree.get_file_revision(
-                            tree.id2path(root_id, recurse='none')))
+                    parent_ids.append(tree.get_file_revision(tree.id2path(root_id), root_id))
                 except errors.NoSuchId:
                     # not in the tree
                     pass
@@ -330,10 +335,10 @@
 
 class TargetRepoKinds(object):
     """An enum-like set of constants.
-
+    
     They are the possible values of FetchSpecFactory.target_repo_kinds.
     """
-
+    
     PREEXISTING = 'preexisting'
     STACKED = 'stacked'
     EMPTY = 'empty'
@@ -415,10 +420,9 @@
                 graph = self.source_repo.get_graph()
                 topo_order = list(graph.iter_topo_order(ret.get_keys()))
                 result_set = topo_order[:self.limit]
-                ret = self.source_repo.revision_ids_to_search_result(
-                    result_set)
+                ret = self.source_repo.revision_ids_to_search_result(result_set)
             return ret
         else:
             return vf_search.NotInOtherForRevs(self.target_repo, self.source_repo,
-                                               required_ids=heads_to_fetch, if_present_ids=if_present_fetch,
-                                               limit=self.limit).execute()
+                required_ids=heads_to_fetch, if_present_ids=if_present_fetch,
+                limit=self.limit).execute()