1
# Copyright (C) 2005-2011 Canonical Ltd
1
# Copyright (C) 2005 Canonical Ltd
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
5
5
# the Free Software Foundation; either version 2 of the License, or
6
6
# (at your option) any later version.
8
8
# This program is distributed in the hope that it will be useful,
9
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
11
# GNU General Public License for more details.
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
from __future__ import absolute_import
19
from .lazy_import import lazy_import
20
lazy_import(globals(), """
24
branch as _mod_branch,
26
conflicts as _mod_conflicts,
31
revision as _mod_revision,
40
from breezy.bzr import (
44
from breezy.i18n import gettext
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20
from shutil import rmtree
21
from tempfile import mkdtemp
24
from bzrlib.branch import Branch
25
from bzrlib.conflicts import ConflictList, Conflict
26
from bzrlib.delta import compare_trees
27
from bzrlib.errors import (BzrCommandError,
37
WorkingTreeNotRevision,
40
from bzrlib.merge3 import Merge3
42
from bzrlib.osutils import rename, pathjoin
43
from progress import DummyProgress, ProgressPhase
44
from bzrlib.revision import common_ancestor, is_ancestor, NULL_REVISION
45
from bzrlib.symbol_versioning import *
46
from bzrlib.textfile import check_text_lines
47
from bzrlib.trace import mutter, warning, note
48
from bzrlib.transform import (TreeTransform, resolve_conflicts, cook_conflicts,
49
FinalPaths, create_by_entry, unique_add)
50
from bzrlib.versionedfile import WeaveMerge
55
53
# TODO: Report back as changes are merged in
58
def transform_tree(from_tree, to_tree, interesting_files=None):
59
with from_tree.lock_tree_write():
60
merge_inner(from_tree.branch, to_tree, from_tree,
61
ignore_zero=True, this_tree=from_tree,
62
interesting_files=interesting_files)
65
class MergeHooks(hooks.Hooks):
68
hooks.Hooks.__init__(self, "breezy.merge", "Merger.hooks")
69
self.add_hook('merge_file_content',
70
"Called with a breezy.merge.Merger object to create a per file "
71
"merge object when starting a merge. "
72
"Should return either None or a subclass of "
73
"``breezy.merge.AbstractPerFileMerger``. "
74
"Such objects will then be called per file "
75
"that needs to be merged (including when one "
76
"side has deleted the file and the other has changed it). "
77
"See the AbstractPerFileMerger API docs for details on how it is "
80
self.add_hook('pre_merge',
81
'Called before a merge. '
82
'Receives a Merger object as the single argument.',
84
self.add_hook('post_merge',
85
'Called after a merge. '
86
'Receives a Merger object as the single argument. '
87
'The return value is ignored.',
91
class AbstractPerFileMerger(object):
92
"""PerFileMerger objects are used by plugins extending merge for breezy.
94
See ``breezy.plugins.news_merge.news_merge`` for an example concrete class.
96
:ivar merger: The Merge3Merger performing the merge.
99
def __init__(self, merger):
100
"""Create a PerFileMerger for use with merger."""
103
def merge_contents(self, merge_params):
104
"""Attempt to merge the contents of a single file.
106
:param merge_params: A breezy.merge.MergeFileHookParams
107
:return: A tuple of (status, chunks), where status is one of
108
'not_applicable', 'success', 'conflicted', or 'delete'. If status
109
is 'success' or 'conflicted', then chunks should be an iterable of
110
strings for the new file contents.
112
return ('not applicable', None)
115
class PerFileMerger(AbstractPerFileMerger):
116
"""Merge individual files when self.file_matches returns True.
118
This class is intended to be subclassed. The file_matches and
119
merge_matching methods should be overridden with concrete implementations.
122
def file_matches(self, params):
123
"""Return True if merge_matching should be called on this file.
125
Only called with merges of plain files with no clear winner.
127
Subclasses must override this.
129
raise NotImplementedError(self.file_matches)
131
def merge_contents(self, params):
132
"""Merge the contents of a single file."""
133
# Check whether this custom merge logic should be used.
135
# OTHER is a straight winner, rely on default merge.
136
params.winner == 'other' or
137
# THIS and OTHER aren't both files.
138
not params.is_file_merge() or
139
# The filename doesn't match
140
not self.file_matches(params)):
141
return 'not_applicable', None
142
return self.merge_matching(params)
144
def merge_matching(self, params):
145
"""Merge the contents of a single file that has matched the criteria
146
in PerFileMerger.merge_contents (is a conflict, is a file,
147
self.file_matches is True).
149
Subclasses must override this.
151
raise NotImplementedError(self.merge_matching)
154
class ConfigurableFileMerger(PerFileMerger):
155
"""Merge individual files when configured via a .conf file.
157
This is a base class for concrete custom file merging logic. Concrete
158
classes should implement ``merge_text``.
160
See ``breezy.plugins.news_merge.news_merge`` for an example concrete class.
162
:ivar affected_files: The configured file paths to merge.
164
:cvar name_prefix: The prefix to use when looking up configuration
165
details. <name_prefix>_merge_files describes the files targeted by the
168
:cvar default_files: The default file paths to merge when no configuration
175
def __init__(self, merger):
176
super(ConfigurableFileMerger, self).__init__(merger)
177
self.affected_files = None
178
self.default_files = self.__class__.default_files or []
179
self.name_prefix = self.__class__.name_prefix
180
if self.name_prefix is None:
181
raise ValueError("name_prefix must be set.")
183
def file_matches(self, params):
184
"""Check whether the file should call the merge hook.
186
<name_prefix>_merge_files configuration variable is a list of files
187
that should use the hook.
189
affected_files = self.affected_files
190
if affected_files is None:
191
config = self.merger.this_branch.get_config()
192
# Until bzr provides a better policy for caching the config, we
193
# just add the part we're interested in to the params to avoid
194
# reading the config files repeatedly (breezy.conf, location.conf,
196
config_key = self.name_prefix + '_merge_files'
197
affected_files = config.get_user_option_as_list(config_key)
198
if affected_files is None:
199
# If nothing was specified in the config, use the default.
200
affected_files = self.default_files
201
self.affected_files = affected_files
203
filepath = params.this_path
204
if filepath in affected_files:
208
def merge_matching(self, params):
209
return self.merge_text(params)
211
def merge_text(self, params):
212
"""Merge the byte contents of a single file.
214
This is called after checking that the merge should be performed in
215
merge_contents, and it should behave as per
216
``breezy.merge.AbstractPerFileMerger.merge_contents``.
218
raise NotImplementedError(self.merge_text)
221
class MergeFileHookParams(object):
222
"""Object holding parameters passed to merge_file_content hooks.
224
There are some fields hooks can access:
226
:ivar base_path: Path in base tree
227
:ivar other_path: Path in other tree
228
:ivar this_path: Path in this tree
229
:ivar trans_id: the transform ID for the merge of this file
230
:ivar this_kind: kind of file in 'this' tree
231
:ivar other_kind: kind of file in 'other' tree
232
:ivar winner: one of 'this', 'other', 'conflict'
235
def __init__(self, merger, paths, trans_id, this_kind, other_kind,
237
self._merger = merger
239
self.base_path, self.other_path, self.this_path = paths
240
self.trans_id = trans_id
241
self.this_kind = this_kind
242
self.other_kind = other_kind
245
def is_file_merge(self):
246
"""True if this_kind and other_kind are both 'file'."""
247
return self.this_kind == 'file' and self.other_kind == 'file'
249
@decorators.cachedproperty
250
def base_lines(self):
251
"""The lines of the 'base' version of the file."""
252
return self._merger.get_lines(self._merger.base_tree, self.base_path)
254
@decorators.cachedproperty
255
def this_lines(self):
256
"""The lines of the 'this' version of the file."""
257
return self._merger.get_lines(self._merger.this_tree, self.this_path)
259
@decorators.cachedproperty
260
def other_lines(self):
261
"""The lines of the 'other' version of the file."""
262
return self._merger.get_lines(self._merger.other_tree, self.other_path)
55
def _get_tree(treespec, local_branch=None):
56
location, revno = treespec
57
branch = Branch.open_containing(location)[0]
61
revision = branch.last_revision()
63
revision = branch.get_rev_id(revno)
65
revision = NULL_REVISION
66
return branch, _get_revid_tree(branch, revision, local_branch)
69
def _get_revid_tree(branch, revision, local_branch):
71
base_tree = branch.bzrdir.open_workingtree()
73
if local_branch is not None:
74
if local_branch.base != branch.base:
75
local_branch.fetch(branch, revision)
76
base_tree = local_branch.repository.revision_tree(revision)
78
base_tree = branch.repository.revision_tree(revision)
82
def transform_tree(from_tree, to_tree, interesting_ids=None):
83
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
84
interesting_ids=interesting_ids, this_tree=from_tree)
265
87
class Merger(object):
269
def __init__(self, this_branch, other_tree=None, base_tree=None,
270
this_tree=None, change_reporter=None,
271
recurse='down', revision_graph=None):
88
def __init__(self, this_branch, other_tree=None, base_tree=None,
89
this_tree=None, pb=DummyProgress()):
272
90
object.__init__(self)
91
assert this_tree is not None, "this_tree is required"
273
92
self.this_branch = this_branch
274
self.this_basis = _mod_revision.ensure_null(
275
this_branch.last_revision())
93
self.this_basis = this_branch.last_revision()
276
94
self.this_rev_id = None
277
95
self.this_tree = this_tree
278
96
self.this_revision_tree = None
279
97
self.this_basis_tree = None
280
98
self.other_tree = other_tree
281
self.other_branch = None
282
99
self.base_tree = base_tree
283
100
self.ignore_zero = False
284
101
self.backup_files = False
285
self.interesting_files = None
102
self.interesting_ids = None
286
103
self.show_base = False
287
104
self.reprocess = False
289
self.recurse = recurse
290
self.change_reporter = change_reporter
291
self._cached_trees = {}
292
self._revision_graph = revision_graph
293
self._base_is_ancestor = None
294
self._base_is_other_ancestor = None
295
self._is_criss_cross = None
296
self._lca_trees = None
298
def cache_trees_with_revision_ids(self, trees):
299
"""Cache any tree in trees if it has a revision_id."""
300
for maybe_tree in trees:
301
if maybe_tree is None:
304
rev_id = maybe_tree.get_revision_id()
305
except AttributeError:
307
self._cached_trees[rev_id] = maybe_tree
310
def revision_graph(self):
311
if self._revision_graph is None:
312
self._revision_graph = self.this_branch.repository.get_graph()
313
return self._revision_graph
315
def _set_base_is_ancestor(self, value):
316
self._base_is_ancestor = value
318
def _get_base_is_ancestor(self):
319
if self._base_is_ancestor is None:
320
self._base_is_ancestor = self.revision_graph.is_ancestor(
321
self.base_rev_id, self.this_basis)
322
return self._base_is_ancestor
324
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
326
def _set_base_is_other_ancestor(self, value):
327
self._base_is_other_ancestor = value
329
def _get_base_is_other_ancestor(self):
330
if self._base_is_other_ancestor is None:
331
if self.other_basis is None:
333
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
334
self.base_rev_id, self.other_basis)
335
return self._base_is_other_ancestor
337
base_is_other_ancestor = property(_get_base_is_other_ancestor,
338
_set_base_is_other_ancestor)
341
def from_uncommitted(tree, other_tree, base_tree=None):
342
"""Return a Merger for uncommitted changes in other_tree.
344
:param tree: The tree to merge into
345
:param other_tree: The tree to get uncommitted changes from
346
:param base_tree: The basis to use for the merge. If unspecified,
347
other_tree.basis_tree() will be used.
349
if base_tree is None:
350
base_tree = other_tree.basis_tree()
351
merger = Merger(tree.branch, other_tree, base_tree, tree)
352
merger.base_rev_id = merger.base_tree.get_revision_id()
353
merger.other_rev_id = None
354
merger.other_basis = merger.base_rev_id
358
def from_mergeable(klass, tree, mergeable):
359
"""Return a Merger for a bundle or merge directive.
361
:param tree: The tree to merge changes into
362
:param mergeable: A merge directive or bundle
364
mergeable.install_revisions(tree.branch.repository)
365
base_revision_id, other_revision_id, verified =\
366
mergeable.get_merge_request(tree.branch.repository)
367
revision_graph = tree.branch.repository.get_graph()
368
if base_revision_id is not None:
369
if (base_revision_id != _mod_revision.NULL_REVISION and
370
revision_graph.is_ancestor(
371
base_revision_id, tree.branch.last_revision())):
372
base_revision_id = None
374
trace.warning('Performing cherrypick')
375
merger = klass.from_revision_ids(tree, other_revision_id,
376
base_revision_id, revision_graph=revision_graph)
377
return merger, verified
380
def from_revision_ids(tree, other, base=None, other_branch=None,
381
base_branch=None, revision_graph=None,
383
"""Return a Merger for revision-ids.
385
:param tree: The tree to merge changes into
386
:param other: The revision-id to use as OTHER
387
:param base: The revision-id to use as BASE. If not specified, will
389
:param other_branch: A branch containing the other revision-id. If
390
not supplied, tree.branch is used.
391
:param base_branch: A branch containing the base revision-id. If
392
not supplied, other_branch or tree.branch will be used.
393
:param revision_graph: If you have a revision_graph precomputed, pass
394
it in, otherwise it will be created for you.
395
:param tree_branch: The branch associated with tree. If not supplied,
396
tree.branch will be used.
398
if tree_branch is None:
399
tree_branch = tree.branch
400
merger = Merger(tree_branch, this_tree=tree,
401
revision_graph=revision_graph)
402
if other_branch is None:
403
other_branch = tree.branch
404
merger.set_other_revision(other, other_branch)
408
if base_branch is None:
409
base_branch = other_branch
410
merger.set_base_revision(base, base_branch)
413
def revision_tree(self, revision_id, branch=None):
414
if revision_id not in self._cached_trees:
416
branch = self.this_branch
418
tree = self.this_tree.revision_tree(revision_id)
419
except errors.NoSuchRevisionInTree:
420
tree = branch.repository.revision_tree(revision_id)
421
self._cached_trees[revision_id] = tree
422
return self._cached_trees[revision_id]
424
def _get_tree(self, treespec, possible_transports=None):
425
location, revno = treespec
427
tree = workingtree.WorkingTree.open_containing(location)[0]
428
return tree.branch, tree
429
branch = _mod_branch.Branch.open_containing(
430
location, possible_transports)[0]
432
revision_id = branch.last_revision()
434
revision_id = branch.get_rev_id(revno)
435
revision_id = _mod_revision.ensure_null(revision_id)
436
return branch, self.revision_tree(revision_id, branch)
109
def revision_tree(self, revision_id):
110
return self.this_branch.repository.revision_tree(revision_id)
112
def ensure_revision_trees(self):
113
if self.this_revision_tree is None:
114
self.this_basis_tree = self.this_branch.repository.revision_tree(
116
if self.this_basis == self.this_rev_id:
117
self.this_revision_tree = self.this_basis_tree
119
if self.other_rev_id is None:
120
other_basis_tree = self.revision_tree(self.other_basis)
121
changes = compare_trees(self.other_tree, other_basis_tree)
122
if changes.has_changed():
123
raise WorkingTreeNotRevision(self.this_tree)
124
other_rev_id = other_basis
125
self.other_tree = other_basis_tree
127
def file_revisions(self, file_id):
128
self.ensure_revision_trees()
129
def get_id(tree, file_id):
130
revision_id = tree.inventory[file_id].revision
131
assert revision_id is not None
133
if self.this_rev_id is None:
134
if self.this_basis_tree.get_file_sha1(file_id) != \
135
self.this_tree.get_file_sha1(file_id):
136
raise WorkingTreeNotRevision(self.this_tree)
138
trees = (self.this_basis_tree, self.other_tree)
139
return [get_id(tree, file_id) for tree in trees]
141
def check_basis(self, check_clean):
142
if self.this_basis is None:
143
raise BzrCommandError("This branch has no commits")
146
if self.this_basis != self.this_rev_id:
147
raise BzrCommandError("Working tree has uncommitted changes.")
149
def compare_basis(self):
150
changes = compare_trees(self.this_tree,
151
self.this_tree.basis_tree(), False)
152
if not changes.has_changed():
153
self.this_rev_id = self.this_basis
438
155
def set_interesting_files(self, file_list):
439
self.interesting_files = file_list
157
self._set_interesting_files(file_list)
158
except NotVersionedError, e:
159
raise BzrCommandError("%s is not a source file in any"
162
def _set_interesting_files(self, file_list):
163
"""Set the list of interesting ids from a list of files."""
164
if file_list is None:
165
self.interesting_ids = None
168
interesting_ids = set()
169
for path in file_list:
171
for tree in (self.this_tree, self.base_tree, self.other_tree):
172
file_id = tree.inventory.path2id(path)
173
if file_id is not None:
174
interesting_ids.add(file_id)
177
raise NotVersionedError(path=path)
178
self.interesting_ids = interesting_ids
441
180
def set_pending(self):
442
if (not self.base_is_ancestor or not self.base_is_other_ancestor
443
or self.other_rev_id is None):
447
def _add_parent(self):
448
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
449
new_parent_trees = []
450
with cleanup.ExitStack() as stack:
451
for revision_id in new_parents:
453
tree = self.revision_tree(revision_id)
454
except errors.NoSuchRevision:
457
stack.enter_context(tree.lock_read())
458
new_parent_trees.append((revision_id, tree))
459
self.this_tree.set_parent_trees(new_parent_trees, allow_leftmost_as_ghost=True)
461
def set_other(self, other_revision, possible_transports=None):
462
"""Set the revision and tree to merge from.
464
This sets the other_tree, other_rev_id, other_basis attributes.
466
:param other_revision: The [path, revision] list to merge from.
468
self.other_branch, self.other_tree = self._get_tree(other_revision,
181
if not self.base_is_ancestor:
183
if self.other_rev_id is None:
185
ancestry = self.this_branch.repository.get_ancestry(self.this_basis)
186
if self.other_rev_id in ancestry:
188
self.this_tree.add_pending_merge(self.other_rev_id)
190
def set_other(self, other_revision):
191
other_branch, self.other_tree = _get_tree(other_revision,
470
193
if other_revision[1] == -1:
471
self.other_rev_id = _mod_revision.ensure_null(
472
self.other_branch.last_revision())
473
if _mod_revision.is_null(self.other_rev_id):
474
raise errors.NoCommits(self.other_branch)
194
self.other_rev_id = other_branch.last_revision()
195
if self.other_rev_id is None:
196
raise NoCommits(other_branch)
475
197
self.other_basis = self.other_rev_id
476
198
elif other_revision[1] is not None:
477
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
199
self.other_rev_id = other_branch.get_rev_id(other_revision[1])
478
200
self.other_basis = self.other_rev_id
480
202
self.other_rev_id = None
481
self.other_basis = self.other_branch.last_revision()
203
self.other_basis = other_branch.last_revision()
482
204
if self.other_basis is None:
483
raise errors.NoCommits(self.other_branch)
484
if self.other_rev_id is not None:
485
self._cached_trees[self.other_rev_id] = self.other_tree
486
self._maybe_fetch(self.other_branch,
487
self.this_branch, self.other_basis)
489
def set_other_revision(self, revision_id, other_branch):
490
"""Set 'other' based on a branch and revision id
492
:param revision_id: The revision to use for a tree
493
:param other_branch: The branch containing this tree
495
self.other_rev_id = revision_id
496
self.other_branch = other_branch
497
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
498
self.other_tree = self.revision_tree(revision_id)
499
self.other_basis = revision_id
501
def set_base_revision(self, revision_id, branch):
502
"""Set 'base' based on a branch and revision id
504
:param revision_id: The revision to use for a tree
505
:param branch: The branch containing this tree
507
self.base_rev_id = revision_id
508
self.base_branch = branch
509
self._maybe_fetch(branch, self.this_branch, revision_id)
510
self.base_tree = self.revision_tree(revision_id)
512
def _maybe_fetch(self, source, target, revision_id):
513
if not source.repository.has_same_location(target.repository):
514
target.fetch(source, revision_id)
517
revisions = [_mod_revision.ensure_null(self.this_basis),
518
_mod_revision.ensure_null(self.other_basis)]
519
if _mod_revision.NULL_REVISION in revisions:
520
self.base_rev_id = _mod_revision.NULL_REVISION
521
self.base_tree = self.revision_tree(self.base_rev_id)
522
self._is_criss_cross = False
524
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
525
self._is_criss_cross = False
527
self.base_rev_id = _mod_revision.NULL_REVISION
529
self.base_rev_id = list(lcas)[0]
530
else: # len(lcas) > 1
531
self._is_criss_cross = True
533
# find_unique_lca can only handle 2 nodes, so we have to
534
# start back at the beginning. It is a shame to traverse
535
# the graph again, but better than re-implementing
537
self.base_rev_id = self.revision_graph.find_unique_lca(
538
revisions[0], revisions[1])
540
self.base_rev_id = self.revision_graph.find_unique_lca(
542
sorted_lca_keys = self.revision_graph.find_merge_order(
544
if self.base_rev_id == _mod_revision.NULL_REVISION:
545
self.base_rev_id = sorted_lca_keys[0]
547
if self.base_rev_id == _mod_revision.NULL_REVISION:
548
raise errors.UnrelatedBranches()
549
if self._is_criss_cross:
550
trace.warning('Warning: criss-cross merge encountered. See bzr'
551
' help criss-cross.')
552
trace.mutter('Criss-cross lcas: %r' % lcas)
553
if self.base_rev_id in lcas:
554
trace.mutter('Unable to find unique lca. '
555
'Fallback %r as best option.'
557
interesting_revision_ids = set(lcas)
558
interesting_revision_ids.add(self.base_rev_id)
559
interesting_trees = dict((t.get_revision_id(), t)
560
for t in self.this_branch.repository.revision_trees(
561
interesting_revision_ids))
562
self._cached_trees.update(interesting_trees)
563
if self.base_rev_id in lcas:
564
self.base_tree = interesting_trees[self.base_rev_id]
566
self.base_tree = interesting_trees.pop(self.base_rev_id)
567
self._lca_trees = [interesting_trees[key]
568
for key in sorted_lca_keys]
570
self.base_tree = self.revision_tree(self.base_rev_id)
571
self.base_is_ancestor = True
572
self.base_is_other_ancestor = True
573
trace.mutter('Base revid: %r' % self.base_rev_id)
205
raise NoCommits(other_branch)
206
if other_branch.base != self.this_branch.base:
207
self.this_branch.fetch(other_branch, last_revision=self.other_basis)
575
209
def set_base(self, base_revision):
576
"""Set the base revision to use for the merge.
578
:param base_revision: A 2-list containing a path and revision number.
580
trace.mutter("doing merge() with no base_revision specified")
210
mutter("doing merge() with no base_revision specified")
581
211
if base_revision == [None, None]:
213
pb = bzrlib.ui.ui_factory.nested_progress_bar()
215
this_repo = self.this_branch.repository
216
self.base_rev_id = common_ancestor(self.this_basis,
221
except NoCommonAncestor:
222
raise UnrelatedBranches()
223
self.base_tree = _get_revid_tree(self.this_branch, self.base_rev_id,
225
self.base_is_ancestor = True
584
base_branch, self.base_tree = self._get_tree(base_revision)
227
base_branch, self.base_tree = _get_tree(base_revision)
585
228
if base_revision[1] == -1:
586
229
self.base_rev_id = base_branch.last_revision()
587
230
elif base_revision[1] is None:
588
self.base_rev_id = _mod_revision.NULL_REVISION
231
self.base_rev_id = None
590
self.base_rev_id = _mod_revision.ensure_null(
591
base_branch.get_rev_id(base_revision[1]))
592
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
233
self.base_rev_id = base_branch.get_rev_id(base_revision[1])
234
if self.this_branch.base != base_branch.base:
235
self.this_branch.fetch(base_branch)
236
self.base_is_ancestor = is_ancestor(self.this_basis,
594
def make_merger(self):
595
kwargs = {'working_tree': self.this_tree, 'this_tree': self.this_tree,
596
'other_tree': self.other_tree,
597
'interesting_files': self.interesting_files,
598
'this_branch': self.this_branch,
599
'other_branch': self.other_branch,
241
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
242
'other_tree': self.other_tree,
243
'interesting_ids': self.interesting_ids,
601
245
if self.merge_type.requires_base:
602
246
kwargs['base_tree'] = self.base_tree
603
247
if self.merge_type.supports_reprocess:
604
248
kwargs['reprocess'] = self.reprocess
605
249
elif self.reprocess:
606
raise errors.BzrError(
607
"Conflict reduction is not supported for merge"
608
" type %s." % self.merge_type)
250
raise BzrError("Conflict reduction is not supported for merge"
251
" type %s." % self.merge_type)
609
252
if self.merge_type.supports_show_base:
610
253
kwargs['show_base'] = self.show_base
611
254
elif self.show_base:
612
raise errors.BzrError("Showing base is not supported for this"
255
raise BzrError("Showing base is not supported for this"
613
256
" merge type. %s" % self.merge_type)
614
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
615
and not self.base_is_other_ancestor):
616
raise errors.CannotReverseCherrypick()
617
if self.merge_type.supports_cherrypick:
618
kwargs['cherrypick'] = (not self.base_is_ancestor or
619
not self.base_is_other_ancestor)
620
if self._is_criss_cross and getattr(self.merge_type,
621
'supports_lca_trees', False):
622
kwargs['lca_trees'] = self._lca_trees
623
return self.merge_type(change_reporter=self.change_reporter,
626
def _do_merge_to(self):
627
merge = self.make_merger()
628
if self.other_branch is not None:
629
self.other_branch.update_references(self.this_branch)
630
for hook in Merger.hooks['pre_merge']:
633
for hook in Merger.hooks['post_merge']:
635
if self.recurse == 'down':
636
for relpath in self.this_tree.iter_references():
637
sub_tree = self.this_tree.get_nested_tree(relpath)
638
other_revision = self.other_tree.get_reference_revision(
640
if other_revision == sub_tree.last_revision():
642
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
643
sub_merge.merge_type = self.merge_type
644
other_branch = self.other_tree.reference_parent(relpath)
645
sub_merge.set_other_revision(other_revision, other_branch)
646
base_tree_path = _mod_tree.find_previous_path(
647
self.this_tree, self.base_tree, relpath)
648
base_revision = self.base_tree.get_reference_revision(
650
sub_merge.base_tree = \
651
sub_tree.branch.repository.revision_tree(base_revision)
652
sub_merge.base_rev_id = base_revision
657
with cleanup.ExitStack() as stack:
658
stack.enter_context(self.this_tree.lock_tree_write())
659
if self.base_tree is not None:
660
stack.enter_context(self.base_tree.lock_read())
661
if self.other_tree is not None:
662
stack.enter_context(self.other_tree.lock_read())
663
merge = self._do_merge_to()
257
merge = self.merge_type(pb=self._pb, **kwargs)
664
258
if len(merge.cooked_conflicts) == 0:
665
if not self.ignore_zero and not trace.is_quiet():
666
trace.note(gettext("All changes applied successfully."))
259
if not self.ignore_zero:
260
note("All changes applied successfully.")
668
trace.note(gettext("%d conflicts encountered.")
669
% len(merge.cooked_conflicts))
262
note("%d conflicts encountered." % len(merge.cooked_conflicts))
671
264
return len(merge.cooked_conflicts)
674
class _InventoryNoneEntry(object):
675
"""This represents an inventory entry which *isn't there*.
677
It simplifies the merging logic if we always have an InventoryEntry, even
678
if it isn't actually present
685
symlink_target = None
688
def is_unmodified(self, other):
692
_none_entry = _InventoryNoneEntry()
266
def regen_inventory(self, new_entries):
267
old_entries = self.this_tree.read_working_inventory()
271
for path, file_id in new_entries:
274
new_entries_map[file_id] = path
276
def id2path(file_id):
277
path = new_entries_map.get(file_id)
280
entry = old_entries[file_id]
281
if entry.parent_id is None:
283
return pathjoin(id2path(entry.parent_id), entry.name)
285
for file_id in old_entries:
286
entry = old_entries[file_id]
287
path = id2path(file_id)
288
if file_id in self.base_tree.inventory:
289
executable = getattr(self.base_tree.inventory[file_id], 'executable', False)
291
executable = getattr(entry, 'executable', False)
292
new_inventory[file_id] = (path, file_id, entry.parent_id,
293
entry.kind, executable)
295
by_path[path] = file_id
300
for path, file_id in new_entries:
302
del new_inventory[file_id]
305
new_path_list.append((path, file_id))
306
if file_id not in old_entries:
308
# Ensure no file is added before its parent
310
for path, file_id in new_path_list:
314
parent = by_path[os.path.dirname(path)]
315
abspath = pathjoin(self.this_tree.basedir, path)
316
kind = bzrlib.osutils.file_kind(abspath)
317
if file_id in self.base_tree.inventory:
318
executable = getattr(self.base_tree.inventory[file_id], 'executable', False)
321
new_inventory[file_id] = (path, file_id, parent, kind, executable)
322
by_path[path] = file_id
324
# Get a list in insertion order
325
new_inventory_list = new_inventory.values()
326
mutter ("""Inventory regeneration:
327
old length: %i insertions: %i deletions: %i new_length: %i"""\
328
% (len(old_entries), insertions, deletions,
329
len(new_inventory_list)))
330
assert len(new_inventory_list) == len(old_entries) + insertions\
332
new_inventory_list.sort()
333
return new_inventory_list
695
336
class Merge3Merger(object):
698
339
supports_reprocess = True
699
340
supports_show_base = True
700
341
history_based = False
701
supports_cherrypick = True
702
supports_reverse_cherrypick = True
703
winner_idx = {"this": 2, "other": 1, "conflict": 1}
704
supports_lca_trees = True
705
requires_file_merge_plan = False
707
def __init__(self, working_tree, this_tree, base_tree, other_tree,
708
reprocess=False, show_base=False,
709
change_reporter=None, interesting_files=None, do_merge=True,
710
cherrypick=False, lca_trees=None, this_branch=None,
712
"""Initialize the merger object and perform the merge.
714
:param working_tree: The working tree to apply the merge to
715
:param this_tree: The local tree in the merge operation
716
:param base_tree: The common tree in the merge operation
717
:param other_tree: The other tree to merge changes from
718
:param this_branch: The branch associated with this_tree. Defaults to
719
this_tree.branch if not supplied.
720
:param other_branch: The branch associated with other_tree, if any.
721
:param: reprocess If True, perform conflict-reduction processing.
722
:param show_base: If True, show the base revision in text conflicts.
723
(incompatible with reprocess)
724
:param change_reporter: An object that should report changes made
725
:param interesting_files: The tree-relative paths of files that should
726
participate in the merge. If these paths refer to directories,
727
the contents of those directories will also be included. If not
728
specified, all files may participate in the
730
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
731
if the ancestry was found to include a criss-cross merge.
732
Otherwise should be None.
343
def __init__(self, working_tree, this_tree, base_tree, other_tree,
344
interesting_ids=None, reprocess=False, show_base=False,
345
pb=DummyProgress(), pp=None):
346
"""Initialize the merger object and perform the merge."""
734
347
object.__init__(self)
735
if this_branch is None:
736
this_branch = this_tree.branch
737
self.interesting_files = interesting_files
738
self.working_tree = working_tree
739
self.this_tree = this_tree
348
self.this_tree = working_tree
740
349
self.base_tree = base_tree
741
350
self.other_tree = other_tree
742
self.this_branch = this_branch
743
self.other_branch = other_branch
744
351
self._raw_conflicts = []
745
352
self.cooked_conflicts = []
746
353
self.reprocess = reprocess
747
354
self.show_base = show_base
748
self._lca_trees = lca_trees
749
# Uncommenting this will change the default algorithm to always use
750
# _entries_lca. This can be useful for running the test suite and
751
# making sure we haven't missed any corner cases.
752
# if lca_trees is None:
753
# self._lca_trees = [self.base_tree]
754
self.change_reporter = change_reporter
755
self.cherrypick = cherrypick
358
self.pp = ProgressPhase("Merge phase", 3, self.pb)
760
with cleanup.ExitStack() as stack:
761
stack.enter_context(self.working_tree.lock_tree_write())
762
stack.enter_context(self.this_tree.lock_read())
763
stack.enter_context(self.base_tree.lock_read())
764
stack.enter_context(self.other_tree.lock_read())
765
self.tt = self.working_tree.get_transform()
766
stack.enter_context(self.tt)
767
self._compute_transform()
768
results = self.tt.apply(no_conflicts=True)
360
if interesting_ids is not None:
361
all_ids = interesting_ids
363
all_ids = set(base_tree)
364
all_ids.update(other_tree)
365
working_tree.lock_write()
366
self.tt = TreeTransform(working_tree, self.pb)
369
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
371
for num, file_id in enumerate(all_ids):
372
child_pb.update('Preparing file merge', num, len(all_ids))
373
self.merge_names(file_id)
374
file_status = self.merge_contents(file_id)
375
self.merge_executable(file_id, file_status)
380
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
382
fs_conflicts = resolve_conflicts(self.tt, child_pb)
385
self.cook_conflicts(fs_conflicts)
386
for conflict in self.cooked_conflicts:
389
results = self.tt.apply()
769
390
self.write_modified(results)
771
self.working_tree.add_conflicts(self.cooked_conflicts)
772
except errors.UnsupportedOperation:
775
def make_preview_transform(self):
776
with self.base_tree.lock_read(), self.other_tree.lock_read():
777
self.tt = transform.TransformPreview(self.working_tree)
778
self._compute_transform()
781
def _compute_transform(self):
782
if self._lca_trees is None:
783
entries = self._entries3()
784
resolver = self._three_way
786
entries = self._entries_lca()
787
resolver = self._lca_multi_way
788
# Prepare merge hooks
789
factories = Merger.hooks['merge_file_content']
790
# One hook for each registered one plus our default merger
791
hooks = [factory(self) for factory in factories] + [self]
792
self.active_hooks = [hook for hook in hooks if hook is not None]
793
with ui.ui_factory.nested_progress_bar() as child_pb:
794
for num, (file_id, changed, paths3, parents3, names3,
795
executable3) in enumerate(entries):
796
trans_id = self.tt.trans_id_file_id(file_id)
798
# Try merging each entry
799
child_pb.update(gettext('Preparing file merge'),
801
self._merge_names(trans_id, file_id, paths3, parents3,
802
names3, resolver=resolver)
804
file_status = self._do_merge_contents(paths3, trans_id, file_id)
806
file_status = 'unmodified'
807
self._merge_executable(paths3, trans_id, executable3,
808
file_status, resolver=resolver)
809
self.tt.fixup_new_roots()
810
self._finish_computing_transform()
812
def _finish_computing_transform(self):
813
"""Finalize the transform and report the changes.
815
This is the second half of _compute_transform.
817
with ui.ui_factory.nested_progress_bar() as child_pb:
818
fs_conflicts = transform.resolve_conflicts(
820
lambda t, c: transform.conflict_pass(t, c, self.other_tree))
821
if self.change_reporter is not None:
822
from breezy import delta
823
delta.report_changes(
824
self.tt.iter_changes(), self.change_reporter)
825
self.cook_conflicts(fs_conflicts)
826
for conflict in self.cooked_conflicts:
827
trace.warning('%s', conflict.describe())
830
"""Gather data about files modified between three trees.
832
Return a list of tuples of file_id, changed, parents3, names3,
833
executable3. changed is a boolean indicating whether the file contents
834
or kind were changed. parents3 is a tuple of parent ids for base,
835
other and this. names3 is a tuple of names for base, other and this.
836
executable3 is a tuple of execute-bit values for base, other and this.
839
iterator = self.other_tree.iter_changes(self.base_tree,
840
specific_files=self.interesting_files,
841
extra_trees=[self.this_tree])
842
this_interesting_files = self.this_tree.find_related_paths_across_trees(
843
self.interesting_files, trees=[self.other_tree])
844
this_entries = dict(self.this_tree.iter_entries_by_dir(
845
specific_files=this_interesting_files))
846
for change in iterator:
847
if change.path[0] is not None:
848
this_path = _mod_tree.find_previous_path(
849
self.base_tree, self.this_tree, change.path[0])
851
this_path = _mod_tree.find_previous_path(
852
self.other_tree, self.this_tree, change.path[1])
853
this_entry = this_entries.get(this_path)
854
if this_entry is not None:
855
this_name = this_entry.name
856
this_parent = this_entry.parent_id
857
this_executable = this_entry.executable
861
this_executable = None
862
parents3 = change.parent_id + (this_parent,)
863
names3 = change.name + (this_name,)
864
paths3 = change.path + (this_path, )
865
executable3 = change.executable + (this_executable,)
867
(change.file_id, change.changed_content, paths3,
868
parents3, names3, executable3))
871
def _entries_lca(self):
872
"""Gather data about files modified between multiple trees.
874
This compares OTHER versus all LCA trees, and for interesting entries,
875
it then compares with THIS and BASE.
877
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
879
:return: [(file_id, changed, paths, parents, names, executable)], where:
881
* file_id: Simple file_id of the entry
882
* changed: Boolean, True if the kind or contents changed else False
883
* paths: ((base, [path, in, lcas]), path_other, path_this)
884
* parents: ((base, [parent_id, in, lcas]), parent_id_other,
886
* names: ((base, [name, in, lcas]), name_in_other, name_in_this)
887
* executable: ((base, [exec, in, lcas]), exec_in_other,
890
if self.interesting_files is not None:
891
lookup_trees = [self.this_tree, self.base_tree]
892
lookup_trees.extend(self._lca_trees)
893
# I think we should include the lca trees as well
894
interesting_files = self.other_tree.find_related_paths_across_trees(
895
self.interesting_files, lookup_trees)
897
interesting_files = None
899
from .multiwalker import MultiWalker
900
walker = MultiWalker(self.other_tree, self._lca_trees)
902
for other_path, file_id, other_ie, lca_values in walker.iter_all():
903
# Is this modified at all from any of the other trees?
905
other_ie = _none_entry
907
if interesting_files is not None and other_path not in interesting_files:
910
# If other_revision is found in any of the lcas, that means this
911
# node is uninteresting. This is because when merging, if there are
912
# multiple heads(), we have to create a new node. So if we didn't,
913
# we know that the ancestry is linear, and that OTHER did not
915
# See doc/developers/lca_merge_resolution.txt for details
916
# We can't use this shortcut when other_revision is None,
917
# because it may be None because things are WorkingTrees, and
918
# not because it is *actually* None.
919
is_unmodified = False
920
for lca_path, ie in lca_values:
921
if ie is not None and other_ie.is_unmodified(ie):
929
for lca_path, lca_ie in lca_values:
931
lca_entries.append(_none_entry)
932
lca_paths.append(None)
934
lca_entries.append(lca_ie)
935
lca_paths.append(lca_path)
938
base_path = self.base_tree.id2path(file_id)
939
except errors.NoSuchId:
941
base_ie = _none_entry
943
base_ie = next(self.base_tree.iter_entries_by_dir(specific_files=[base_path]))[1]
946
this_path = self.this_tree.id2path(file_id)
947
except errors.NoSuchId:
948
this_ie = _none_entry
951
this_ie = next(self.this_tree.iter_entries_by_dir(specific_files=[this_path]))[1]
957
for lca_ie in lca_entries:
958
lca_kinds.append(lca_ie.kind)
959
lca_parent_ids.append(lca_ie.parent_id)
960
lca_names.append(lca_ie.name)
961
lca_executable.append(lca_ie.executable)
963
kind_winner = self._lca_multi_way(
964
(base_ie.kind, lca_kinds),
965
other_ie.kind, this_ie.kind)
966
parent_id_winner = self._lca_multi_way(
967
(base_ie.parent_id, lca_parent_ids),
968
other_ie.parent_id, this_ie.parent_id)
969
name_winner = self._lca_multi_way(
970
(base_ie.name, lca_names),
971
other_ie.name, this_ie.name)
973
content_changed = True
974
if kind_winner == 'this':
975
# No kind change in OTHER, see if there are *any* changes
976
if other_ie.kind == 'directory':
977
if parent_id_winner == 'this' and name_winner == 'this':
978
# No change for this directory in OTHER, skip
980
content_changed = False
981
elif other_ie.kind is None or other_ie.kind == 'file':
982
def get_sha1(tree, path):
986
return tree.get_file_sha1(path)
987
except errors.NoSuchFile:
989
base_sha1 = get_sha1(self.base_tree, base_path)
990
lca_sha1s = [get_sha1(tree, lca_path)
992
in zip(self._lca_trees, lca_paths)]
993
this_sha1 = get_sha1(self.this_tree, this_path)
994
other_sha1 = get_sha1(self.other_tree, other_path)
995
sha1_winner = self._lca_multi_way(
996
(base_sha1, lca_sha1s), other_sha1, this_sha1,
997
allow_overriding_lca=False)
998
exec_winner = self._lca_multi_way(
999
(base_ie.executable, lca_executable),
1000
other_ie.executable, this_ie.executable)
1001
if (parent_id_winner == 'this' and name_winner == 'this'
1002
and sha1_winner == 'this' and exec_winner == 'this'):
1003
# No kind, parent, name, exec, or content change for
1004
# OTHER, so this node is not considered interesting
1006
if sha1_winner == 'this':
1007
content_changed = False
1008
elif other_ie.kind == 'symlink':
1009
def get_target(ie, tree, path):
1010
if ie.kind != 'symlink':
1012
return tree.get_symlink_target(path)
1013
base_target = get_target(base_ie, self.base_tree, base_path)
1014
lca_targets = [get_target(ie, tree, lca_path) for ie, tree, lca_path
1015
in zip(lca_entries, self._lca_trees, lca_paths)]
1016
this_target = get_target(
1017
this_ie, self.this_tree, this_path)
1018
other_target = get_target(
1019
other_ie, self.other_tree, other_path)
1020
target_winner = self._lca_multi_way(
1021
(base_target, lca_targets),
1022
other_target, this_target)
1023
if (parent_id_winner == 'this' and name_winner == 'this'
1024
and target_winner == 'this'):
1025
# No kind, parent, name, or symlink target change
1028
if target_winner == 'this':
1029
content_changed = False
1030
elif other_ie.kind == 'tree-reference':
1031
# The 'changed' information seems to be handled at a higher
1032
# level. At least, _entries3 returns False for content
1033
# changed, even when at a new revision_id.
1034
content_changed = False
1035
if (parent_id_winner == 'this' and name_winner == 'this'):
1036
# Nothing interesting
1039
raise AssertionError('unhandled kind: %s' % other_ie.kind)
1041
# If we have gotten this far, that means something has changed
1042
result.append((file_id, content_changed,
1043
((base_path, lca_paths),
1044
other_path, this_path),
1045
((base_ie.parent_id, lca_parent_ids),
1046
other_ie.parent_id, this_ie.parent_id),
1047
((base_ie.name, lca_names),
1048
other_ie.name, this_ie.name),
1049
((base_ie.executable, lca_executable),
1050
other_ie.executable, this_ie.executable)
392
working_tree.set_conflicts(ConflictList(self.cooked_conflicts))
393
except UnsupportedOperation:
400
working_tree.unlock()
1054
403
def write_modified(self, results):
1055
if not self.working_tree.supports_merge_modified():
1057
404
modified_hashes = {}
1058
405
for path in results.modified_paths:
1059
wt_relpath = self.working_tree.relpath(path)
1060
if not self.working_tree.is_versioned(wt_relpath):
406
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
1062
hash = self.working_tree.get_file_sha1(wt_relpath)
409
hash = self.this_tree.get_file_sha1(file_id)
1063
410
if hash is None:
1065
modified_hashes[wt_relpath] = hash
1066
self.working_tree.set_merge_modified(modified_hashes)
412
modified_hashes[file_id] = hash
413
self.this_tree.set_merge_modified(modified_hashes)
416
def parent(entry, file_id):
1070
417
"""Determine the parent for a file_id (used as a key method)"""
1071
418
if entry is None:
1073
420
return entry.parent_id
423
def name(entry, file_id):
1077
424
"""Determine the name for a file_id (used as a key method)"""
1078
425
if entry is None:
1080
427
return entry.name
1083
def contents_sha1(tree, path):
430
def contents_sha1(tree, file_id):
1084
431
"""Determine the sha1 of the file contents (used as a key method)."""
1086
return tree.get_file_sha1(path)
1087
except errors.NoSuchFile:
432
if file_id not in tree:
434
return tree.get_file_sha1(file_id)
1091
def executable(tree, path):
437
def executable(tree, file_id):
1092
438
"""Determine the executability of a file-id (used as a key method)."""
1094
if tree.kind(path) != "file":
1096
except errors.NoSuchFile:
439
if file_id not in tree:
1098
return tree.is_executable(path)
441
if tree.kind(file_id) != "file":
443
return tree.is_executable(file_id)
1101
def kind(tree, path):
446
def kind(tree, file_id):
1102
447
"""Determine the kind of a file-id (used as a key method)."""
1104
return tree.kind(path)
1105
except errors.NoSuchFile:
448
if file_id not in tree:
450
return tree.kind(file_id)
1109
def _three_way(base, other, this):
1111
# if 'base == other', either they all agree, or only 'this' has
1114
elif this not in (base, other):
1115
# 'this' is neither 'base' nor 'other', so both sides changed
1118
# "Ambiguous clean merge" -- both sides have made the same change.
453
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
454
"""Do a three-way test on a scalar.
455
Return "this", "other" or "conflict", depending whether a value wins.
457
key_base = key(base_tree, file_id)
458
key_other = key(other_tree, file_id)
459
#if base == other, either they all agree, or only THIS has changed.
460
if key_base == key_other:
462
key_this = key(this_tree, file_id)
463
if key_this not in (key_base, key_other):
465
# "Ambiguous clean merge"
466
elif key_this == key_other:
1121
# this == base: only other has changed.
469
assert key_this == key_base
1125
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
1126
"""Consider LCAs when determining whether a change has occurred.
1128
If LCAS are all identical, this is the same as a _three_way comparison.
1130
:param bases: value in (BASE, [LCAS])
1131
:param other: value in OTHER
1132
:param this: value in THIS
1133
:param allow_overriding_lca: If there is more than one unique lca
1134
value, allow OTHER to override THIS if it has a new value, and
1135
THIS only has an lca value, or vice versa. This is appropriate for
1136
truly scalar values, not as much for non-scalars.
1137
:return: 'this', 'other', or 'conflict' depending on whether an entry
1140
# See doc/developers/lca_tree_merging.txt for details about this
1143
# Either Ambiguously clean, or nothing was actually changed. We
1146
base_val, lca_vals = bases
1147
# Remove 'base_val' from the lca_vals, because it is not interesting
1148
filtered_lca_vals = [lca_val for lca_val in lca_vals
1149
if lca_val != base_val]
1150
if len(filtered_lca_vals) == 0:
1151
return Merge3Merger._three_way(base_val, other, this)
1153
unique_lca_vals = set(filtered_lca_vals)
1154
if len(unique_lca_vals) == 1:
1155
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
1157
if allow_overriding_lca:
1158
if other in unique_lca_vals:
1159
if this in unique_lca_vals:
1160
# Each side picked a different lca, conflict
1163
# This has a value which supersedes both lca values, and
1164
# other only has an lca value
1166
elif this in unique_lca_vals:
1167
# OTHER has a value which supersedes both lca values, and this
1168
# only has an lca value
1171
# At this point, the lcas disagree, and the tip disagree
1174
def _merge_names(self, trans_id, file_id, paths, parents, names, resolver):
1175
"""Perform a merge on file names and parents"""
1176
base_name, other_name, this_name = names
1177
base_parent, other_parent, this_parent = parents
1178
unused_base_path, other_path, this_path = paths
1180
name_winner = resolver(*names)
1182
parent_id_winner = resolver(*parents)
1183
if this_name is None:
472
def merge_names(self, file_id):
473
"""Perform a merge on file_id names and parents"""
475
if file_id in tree.inventory:
476
return tree.inventory[file_id]
479
this_entry = get_entry(self.this_tree)
480
other_entry = get_entry(self.other_tree)
481
base_entry = get_entry(self.base_tree)
482
name_winner = self.scalar_three_way(this_entry, base_entry,
483
other_entry, file_id, self.name)
484
parent_id_winner = self.scalar_three_way(this_entry, base_entry,
485
other_entry, file_id,
487
if this_entry is None:
1184
488
if name_winner == "this":
1185
489
name_winner = "other"
1186
490
if parent_id_winner == "this":
1187
491
parent_id_winner = "other"
1188
492
if name_winner == "this" and parent_id_winner == "this":
1190
if name_winner == 'conflict' or parent_id_winner == 'conflict':
1191
# Creating helpers (.OTHER or .THIS) here cause problems down the
1192
# road if a ContentConflict needs to be created so we should not do
1194
self._raw_conflicts.append(('path conflict', trans_id, file_id,
1195
this_parent, this_name,
1196
other_parent, other_name))
1197
if other_path is None:
1198
# it doesn't matter whether the result was 'other' or
1199
# 'conflict'-- if it has no file id, we leave it alone.
494
if name_winner == "conflict":
495
trans_id = self.tt.trans_id_file_id(file_id)
496
self._raw_conflicts.append(('name conflict', trans_id,
497
self.name(this_entry, file_id),
498
self.name(other_entry, file_id)))
499
if parent_id_winner == "conflict":
500
trans_id = self.tt.trans_id_file_id(file_id)
501
self._raw_conflicts.append(('parent conflict', trans_id,
502
self.parent(this_entry, file_id),
503
self.parent(other_entry, file_id)))
504
if other_entry is None:
505
# it doesn't matter whether the result was 'other' or
506
# 'conflict'-- if there's no 'other', we leave it alone.
1201
parent_id = parents[self.winner_idx[parent_id_winner]]
1202
name = names[self.winner_idx[name_winner]]
1203
if parent_id is not None or name is not None:
1204
# if we get here, name_winner and parent_winner are set to safe
1206
if parent_id is None and name is not None:
1207
# if parent_id is None and name is non-None, current file is
1209
if names[self.winner_idx[parent_id_winner]] != '':
1210
raise AssertionError(
1211
'File looks like a root, but named %s' %
1212
names[self.winner_idx[parent_id_winner]])
1213
parent_trans_id = transform.ROOT_PARENT
1215
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1216
self.tt.adjust_path(name, parent_trans_id, trans_id)
508
# if we get here, name_winner and parent_winner are set to safe values.
509
winner_entry = {"this": this_entry, "other": other_entry,
510
"conflict": other_entry}
511
trans_id = self.tt.trans_id_file_id(file_id)
512
parent_id = winner_entry[parent_id_winner].parent_id
513
parent_trans_id = self.tt.trans_id_file_id(parent_id)
514
self.tt.adjust_path(winner_entry[name_winner].name, parent_trans_id,
1218
def _do_merge_contents(self, paths, trans_id, file_id):
1219
"""Performs a merge on file_id contents."""
1220
def contents_pair(tree, path):
1224
kind = tree.kind(path)
1225
except errors.NoSuchFile:
517
def merge_contents(self, file_id):
518
"""Performa a merge on file_id contents."""
519
def contents_pair(tree):
520
if file_id not in tree:
522
kind = tree.kind(file_id)
523
if kind == "root_directory":
1227
525
if kind == "file":
1228
contents = tree.get_file_sha1(path)
526
contents = tree.get_file_sha1(file_id)
1229
527
elif kind == "symlink":
1230
contents = tree.get_symlink_target(path)
528
contents = tree.get_symlink_target(file_id)
1233
531
return kind, contents
1235
base_path, other_path, this_path = paths
533
def contents_conflict():
534
trans_id = self.tt.trans_id_file_id(file_id)
535
name = self.tt.final_name(trans_id)
536
parent_id = self.tt.final_parent(trans_id)
537
if file_id in self.this_tree.inventory:
538
self.tt.unversion_file(trans_id)
539
self.tt.delete_contents(trans_id)
540
file_group = self._dump_conflicts(name, parent_id, file_id,
542
self._raw_conflicts.append(('contents conflict', file_group))
1236
544
# See SPOT run. run, SPOT, run.
1237
545
# So we're not QUITE repeating ourselves; we do tricky things with
1239
other_pair = contents_pair(self.other_tree, other_path)
1240
this_pair = contents_pair(self.this_tree, this_path)
1242
(base_path, lca_paths) = base_path
1243
base_pair = contents_pair(self.base_tree, base_path)
1244
lca_pairs = [contents_pair(tree, path)
1245
for tree, path in zip(self._lca_trees, lca_paths)]
1246
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1247
this_pair, allow_overriding_lca=False)
547
base_pair = contents_pair(self.base_tree)
548
other_pair = contents_pair(self.other_tree)
549
if base_pair == other_pair:
550
# OTHER introduced no changes
552
this_pair = contents_pair(self.this_tree)
553
if this_pair == other_pair:
554
# THIS and OTHER introduced the same changes
1249
base_pair = contents_pair(self.base_tree, base_path)
1250
if base_pair == other_pair:
1253
# We delayed evaluating this_pair as long as we can to avoid
1254
# unnecessary sha1 calculation
1255
this_pair = contents_pair(self.this_tree, this_path)
1256
winner = self._three_way(base_pair, other_pair, this_pair)
1257
if winner == 'this':
1258
# No interesting changes introduced by OTHER
1260
# We have a hypothetical conflict, but if we have files, then we
1261
# can try to merge the content
1262
params = MergeFileHookParams(
1263
self, (base_path, other_path, this_path), trans_id, this_pair[0],
1264
other_pair[0], winner)
1265
hooks = self.active_hooks
1266
hook_status = 'not_applicable'
1268
hook_status, lines = hook.merge_contents(params)
1269
if hook_status != 'not_applicable':
1270
# Don't try any more hooks, this one applies.
1272
# If the merge ends up replacing the content of the file, we get rid of
1273
# it at the end of this method (this variable is used to track the
1274
# exceptions to this rule).
1277
if hook_status == 'not_applicable':
1278
# No merge hook was able to resolve the situation. Two cases exist:
1279
# a content conflict or a duplicate one.
1281
name = self.tt.final_name(trans_id)
1282
parent_id = self.tt.final_parent(trans_id)
1283
inhibit_content_conflict = False
1284
if params.this_kind is None: # file_id is not in THIS
1285
# Is the name used for a different file_id ?
1286
if self.this_tree.is_versioned(other_path):
1287
# Two entries for the same path
1289
# versioning the merged file will trigger a duplicate
1291
self.tt.version_file(file_id, trans_id)
1292
transform.create_from_tree(
1293
self.tt, trans_id, self.other_tree,
1295
filter_tree_path=self._get_filter_tree_path(other_path))
1296
inhibit_content_conflict = True
1297
elif params.other_kind is None: # file_id is not in OTHER
1298
# Is the name used for a different file_id ?
1299
if self.other_tree.is_versioned(this_path):
1300
# Two entries for the same path again, but here, the other
1301
# entry will also be merged. We simply inhibit the
1302
# 'content' conflict creation because we know OTHER will
1303
# create (or has already created depending on ordering) an
1304
# entry at the same path. This will trigger a 'duplicate'
1307
inhibit_content_conflict = True
1308
if not inhibit_content_conflict:
1309
if params.this_kind is not None:
557
trans_id = self.tt.trans_id_file_id(file_id)
558
if this_pair == base_pair:
559
# only OTHER introduced changes
560
if file_id in self.this_tree:
561
# Remove any existing contents
562
self.tt.delete_contents(trans_id)
563
if file_id in self.other_tree:
564
# OTHER changed the file
565
create_by_entry(self.tt,
566
self.other_tree.inventory[file_id],
567
self.other_tree, trans_id)
568
if file_id not in self.this_tree.inventory:
569
self.tt.version_file(file_id, trans_id)
571
elif file_id in self.this_tree.inventory:
572
# OTHER deleted the file
1310
573
self.tt.unversion_file(trans_id)
1311
# This is a contents conflict, because none of the available
1312
# functions could merge it.
1313
file_group = self._dump_conflicts(
1314
name, (base_path, other_path, this_path), parent_id,
1315
file_id, set_version=True)
1316
self._raw_conflicts.append(('contents conflict', file_group))
1317
elif hook_status == 'success':
1318
self.tt.create_file(lines, trans_id)
1319
elif hook_status == 'conflicted':
1320
# XXX: perhaps the hook should be able to provide
1321
# the BASE/THIS/OTHER files?
1322
self.tt.create_file(lines, trans_id)
1323
self._raw_conflicts.append(('text conflict', trans_id))
1324
name = self.tt.final_name(trans_id)
1325
parent_id = self.tt.final_parent(trans_id)
1326
self._dump_conflicts(
1327
name, (base_path, other_path, this_path), parent_id, file_id)
1328
elif hook_status == 'delete':
1329
self.tt.unversion_file(trans_id)
1331
elif hook_status == 'done':
1332
# The hook function did whatever it needs to do directly, no
1333
# further action needed here.
1336
raise AssertionError('unknown hook_status: %r' % (hook_status,))
1337
if not this_path and result == "modified":
1338
self.tt.version_file(file_id, trans_id)
1340
# The merge has been performed and produced a new content, so the
1341
# old contents should not be retained.
1342
self.tt.delete_contents(trans_id)
1345
def _default_other_winner_merge(self, merge_hook_params):
1346
"""Replace this contents with other."""
1347
trans_id = merge_hook_params.trans_id
1348
if merge_hook_params.other_path is not None:
1349
# OTHER changed the file
1350
transform.create_from_tree(
1351
self.tt, trans_id, self.other_tree,
1352
merge_hook_params.other_path,
1353
filter_tree_path=self._get_filter_tree_path(merge_hook_params.other_path))
1355
elif merge_hook_params.this_path is not None:
1356
# OTHER deleted the file
1357
return 'delete', None
1359
raise AssertionError(
1360
'winner is OTHER, but file %r not in THIS or OTHER tree'
1361
% (merge_hook_params.base_path,))
1363
def merge_contents(self, merge_hook_params):
1364
"""Fallback merge logic after user installed hooks."""
1365
# This function is used in merge hooks as the fallback instance.
1366
# Perhaps making this function and the functions it calls be a
1367
# a separate class would be better.
1368
if merge_hook_params.winner == 'other':
1369
# OTHER is a straight winner, so replace this contents with other
1370
return self._default_other_winner_merge(merge_hook_params)
1371
elif merge_hook_params.is_file_merge():
1372
# THIS and OTHER are both files, so text merge. Either
1373
# BASE is a file, or both converted to files, so at least we
1374
# have agreement that output should be a file.
1376
self.text_merge(merge_hook_params.trans_id,
1377
merge_hook_params.paths)
1378
except errors.BinaryFile:
1379
return 'not_applicable', None
1382
return 'not_applicable', None
1384
def get_lines(self, tree, path):
575
#BOTH THIS and OTHER introduced changes; scalar conflict
576
elif this_pair[0] == "file" and other_pair[0] == "file":
577
# THIS and OTHER are both files, so text merge. Either
578
# BASE is a file, or both converted to files, so at least we
579
# have agreement that output should be a file.
581
self.text_merge(file_id, trans_id)
583
return contents_conflict()
584
if file_id not in self.this_tree.inventory:
585
self.tt.version_file(file_id, trans_id)
587
self.tt.tree_kind(trans_id)
588
self.tt.delete_contents(trans_id)
593
# Scalar conflict, can't text merge. Dump conflicts
594
return contents_conflict()
596
def get_lines(self, tree, file_id):
1385
597
"""Return the lines in a file, or an empty list."""
1389
kind = tree.kind(path)
1390
except errors.NoSuchFile:
599
return tree.get_file(file_id).readlines()
1395
return tree.get_file_lines(path)
1397
def text_merge(self, trans_id, paths):
1398
"""Perform a three-way text merge on a file"""
603
def text_merge(self, file_id, trans_id):
604
"""Perform a three-way text merge on a file_id"""
1399
605
# it's possible that we got here with base as a different type.
1400
606
# if so, we just want two-way text conflicts.
1401
base_path, other_path, this_path = paths
1402
base_lines = self.get_lines(self.base_tree, base_path)
1403
other_lines = self.get_lines(self.other_tree, other_path)
1404
this_lines = self.get_lines(self.this_tree, this_path)
1405
m3 = merge3.Merge3(base_lines, this_lines, other_lines,
1406
is_cherrypick=self.cherrypick)
1407
start_marker = b"!START OF MERGE CONFLICT!" + b"I HOPE THIS IS UNIQUE"
607
if file_id in self.base_tree and \
608
self.base_tree.kind(file_id) == "file":
609
base_lines = self.get_lines(self.base_tree, file_id)
612
other_lines = self.get_lines(self.other_tree, file_id)
613
this_lines = self.get_lines(self.this_tree, file_id)
614
m3 = Merge3(base_lines, this_lines, other_lines)
615
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
1408
616
if self.show_base is True:
1409
base_marker = b'|' * 7
617
base_marker = '|' * 7
1411
619
base_marker = None
1413
621
def iter_merge3(retval):
1414
622
retval["text_conflicts"] = False
1415
for line in m3.merge_lines(name_a=b"TREE",
1416
name_b=b"MERGE-SOURCE",
1417
name_base=b"BASE-REVISION",
1418
start_marker=start_marker,
623
for line in m3.merge_lines(name_a = "TREE",
624
name_b = "MERGE-SOURCE",
625
name_base = "BASE-REVISION",
626
start_marker=start_marker,
1419
627
base_marker=base_marker,
1420
628
reprocess=self.reprocess):
1421
629
if line.startswith(start_marker):
1422
630
retval["text_conflicts"] = True
1423
yield line.replace(start_marker, b'<' * 7)
631
yield line.replace(start_marker, '<' * 7)
1458
651
determined automatically. If set_version is true, the .OTHER, .THIS
1459
652
or .BASE (in that order) will be created as versioned files.
1461
base_path, other_path, this_path = paths
1462
data = [('OTHER', self.other_tree, other_path, other_lines),
1463
('THIS', self.this_tree, this_path, this_lines)]
654
data = [('OTHER', self.other_tree, other_lines),
655
('THIS', self.this_tree, this_lines)]
1465
data.append(('BASE', self.base_tree, base_path, base_lines))
1467
# We need to use the actual path in the working tree of the file here,
1468
# ignoring the conflict suffixes
1470
if wt.supports_content_filtering():
1472
filter_tree_path = wt.id2path(file_id)
1473
except errors.NoSuchId:
1474
# file has been deleted
1475
filter_tree_path = None
1477
# Skip the id2path lookup for older formats
1478
filter_tree_path = None
657
data.append(('BASE', self.base_tree, base_lines))
1480
658
versioned = False
1482
for suffix, tree, path, lines in data:
1483
if path is not None:
1484
trans_id = self._conflict_file(
1485
name, parent_id, path, tree, suffix, lines,
660
for suffix, tree, lines in data:
662
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1487
664
file_group.append(trans_id)
1488
665
if set_version and not versioned:
1489
666
self.tt.version_file(file_id, trans_id)
1490
667
versioned = True
1491
668
return file_group
1493
def _conflict_file(self, name, parent_id, path, tree, suffix,
1494
lines=None, filter_tree_path=None):
670
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1495
672
"""Emit a single conflict file."""
1496
673
name = name + '.' + suffix
1497
674
trans_id = self.tt.create_path(name, parent_id)
1498
transform.create_from_tree(
1499
self.tt, trans_id, tree, path,
1501
filter_tree_path=filter_tree_path)
675
entry = tree.inventory[file_id]
676
create_by_entry(self.tt, entry, tree, trans_id, lines)
1504
def _merge_executable(self, paths, trans_id, executable, file_status,
679
def merge_executable(self, file_id, file_status):
1506
680
"""Perform a merge on the execute bit."""
1507
base_executable, other_executable, this_executable = executable
1508
base_path, other_path, this_path = paths
1509
681
if file_status == "deleted":
1511
winner = resolver(*executable)
683
trans_id = self.tt.trans_id_file_id(file_id)
685
if self.tt.final_kind(trans_id) != "file":
689
winner = self.scalar_three_way(self.this_tree, self.base_tree,
690
self.other_tree, file_id,
1512
692
if winner == "conflict":
1513
# There must be a None in here, if we have a conflict, but we
1514
# need executability since file status was not deleted.
1515
if other_path is None:
693
# There must be a None in here, if we have a conflict, but we
694
# need executability since file status was not deleted.
695
if self.other_tree.is_executable(file_id) is None:
1518
698
winner = "other"
1519
if winner == 'this' and file_status != "modified":
1521
if self.tt.final_kind(trans_id) != "file":
1523
699
if winner == "this":
1524
executability = this_executable
700
if file_status == "modified":
701
executability = self.this_tree.is_executable(file_id)
702
if executability is not None:
703
trans_id = self.tt.trans_id_file_id(file_id)
704
self.tt.set_executability(executability, trans_id)
1526
if other_path is not None:
1527
executability = other_executable
1528
elif this_path is not None:
1529
executability = this_executable
1530
elif base_path is not None:
1531
executability = base_executable
1532
if executability is not None:
1533
self.tt.set_executability(executability, trans_id)
706
assert winner == "other"
707
if file_id in self.other_tree:
708
executability = self.other_tree.is_executable(file_id)
709
elif file_id in self.this_tree:
710
executability = self.this_tree.is_executable(file_id)
711
elif file_id in self.base_tree:
712
executability = self.base_tree.is_executable(file_id)
713
if executability is not None:
714
trans_id = self.tt.trans_id_file_id(file_id)
715
self.tt.set_executability(executability, trans_id)
1535
717
def cook_conflicts(self, fs_conflicts):
1536
718
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1537
content_conflict_file_ids = set()
1538
cooked_conflicts = transform.cook_conflicts(fs_conflicts, self.tt)
1539
fp = transform.FinalPaths(self.tt)
719
from conflicts import Conflict
721
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
722
fp = FinalPaths(self.tt)
1540
723
for conflict in self._raw_conflicts:
1541
724
conflict_type = conflict[0]
1542
if conflict_type == 'path conflict':
1544
this_parent, this_name,
1545
other_parent, other_name) = conflict[1:]
1546
if this_parent is None or this_name is None:
1547
this_path = '<deleted>'
1549
parent_path = fp.get_path(
1550
self.tt.trans_id_file_id(this_parent))
1551
this_path = osutils.pathjoin(parent_path, this_name)
1552
if other_parent is None or other_name is None:
1553
other_path = '<deleted>'
1555
if other_parent == self.other_tree.path2id(''):
1556
# The tree transform doesn't know about the other root,
1557
# so we special case here to avoid a NoFinalPath
1561
parent_path = fp.get_path(
1562
self.tt.trans_id_file_id(other_parent))
1563
other_path = osutils.pathjoin(parent_path, other_name)
1564
c = _mod_conflicts.Conflict.factory(
1565
'path conflict', path=this_path,
1566
conflict_path=other_path,
1568
elif conflict_type == 'contents conflict':
725
if conflict_type in ('name conflict', 'parent conflict'):
726
trans_id = conflict[1]
727
conflict_args = conflict[2:]
728
if trans_id not in name_conflicts:
729
name_conflicts[trans_id] = {}
730
unique_add(name_conflicts[trans_id], conflict_type,
732
if conflict_type == 'contents conflict':
1569
733
for trans_id in conflict[1]:
1570
734
file_id = self.tt.final_file_id(trans_id)
1571
735
if file_id is not None:
1572
# Ok we found the relevant file-id
1574
737
path = fp.get_path(trans_id)
1575
738
for suffix in ('.BASE', '.THIS', '.OTHER'):
1576
739
if path.endswith(suffix):
1577
# Here is the raw path
1578
740
path = path[:-len(suffix)]
1580
c = _mod_conflicts.Conflict.factory(conflict_type,
1581
path=path, file_id=file_id)
1582
content_conflict_file_ids.add(file_id)
1583
elif conflict_type == 'text conflict':
742
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
743
self.cooked_conflicts.append(c)
744
if conflict_type == 'text conflict':
1584
745
trans_id = conflict[1]
1585
746
path = fp.get_path(trans_id)
1586
747
file_id = self.tt.final_file_id(trans_id)
1587
c = _mod_conflicts.Conflict.factory(conflict_type,
1588
path=path, file_id=file_id)
748
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
749
self.cooked_conflicts.append(c)
751
for trans_id, conflicts in name_conflicts.iteritems():
753
this_parent, other_parent = conflicts['parent conflict']
754
assert this_parent != other_parent
756
this_parent = other_parent = \
757
self.tt.final_file_id(self.tt.final_parent(trans_id))
759
this_name, other_name = conflicts['name conflict']
760
assert this_name != other_name
762
this_name = other_name = self.tt.final_name(trans_id)
763
other_path = fp.get_path(trans_id)
764
if this_parent is not None:
766
fp.get_path(self.tt.trans_id_file_id(this_parent))
767
this_path = pathjoin(this_parent_path, this_name)
1590
raise AssertionError('bad conflict type: %r' % (conflict,))
1591
cooked_conflicts.append(c)
1593
self.cooked_conflicts = []
1594
# We want to get rid of path conflicts when a corresponding contents
1595
# conflict exists. This can occur when one branch deletes a file while
1596
# the other renames *and* modifies it. In this case, the content
1597
# conflict is enough.
1598
for c in cooked_conflicts:
1599
if (c.typestring == 'path conflict'
1600
and c.file_id in content_conflict_file_ids):
769
this_path = "<deleted>"
770
file_id = self.tt.final_file_id(trans_id)
771
c = Conflict.factory('path conflict', path=this_path,
772
conflict_path=other_path, file_id=file_id)
1602
773
self.cooked_conflicts.append(c)
1603
self.cooked_conflicts.sort(key=_mod_conflicts.Conflict.sort_key)
774
self.cooked_conflicts.sort(key=Conflict.sort_key)
1606
777
class WeaveMerger(Merge3Merger):
1607
778
"""Three-way tree merger, text weave merger."""
1608
779
supports_reprocess = True
1609
780
supports_show_base = False
1610
supports_reverse_cherrypick = False
1611
history_based = True
1612
requires_file_merge_plan = True
1614
def _generate_merge_plan(self, this_path, base):
1615
return self.this_tree.plan_file_merge(this_path, self.other_tree,
1618
def _merged_lines(self, this_path):
782
def __init__(self, working_tree, this_tree, base_tree, other_tree,
783
interesting_ids=None, pb=DummyProgress(), pp=None,
785
self.this_revision_tree = self._get_revision_tree(this_tree)
786
self.other_revision_tree = self._get_revision_tree(other_tree)
787
super(WeaveMerger, self).__init__(working_tree, this_tree,
788
base_tree, other_tree,
789
interesting_ids=interesting_ids,
790
pb=pb, pp=pp, reprocess=reprocess)
792
def _get_revision_tree(self, tree):
793
"""Return a revision tree releated to this tree.
794
If the tree is a WorkingTree, the basis will be returned.
796
if getattr(tree, 'get_weave', False) is False:
797
# If we have a WorkingTree, try using the basis
798
return tree.branch.basis_tree()
802
def _check_file(self, file_id):
803
"""Check that the revision tree's version of the file matches."""
804
for tree, rt in ((self.this_tree, self.this_revision_tree),
805
(self.other_tree, self.other_revision_tree)):
808
if tree.get_file_sha1(file_id) != rt.get_file_sha1(file_id):
809
raise WorkingTreeNotRevision(self.this_tree)
811
def _merged_lines(self, file_id):
1619
812
"""Generate the merged lines.
1620
813
There is no distinction between lines that are meant to contain <<<<<<<
1624
base = self.base_tree
1627
plan = self._generate_merge_plan(this_path, base)
1628
if 'merge' in debug.debug_flags:
1630
trans_id = self.tt.trans_id_file_id(file_id)
1631
name = self.tt.final_name(trans_id) + '.plan'
1632
contents = (b'%11s|%s' % l for l in plan)
1633
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1634
textmerge = versionedfile.PlanWeaveMerge(plan, b'<<<<<<< TREE\n',
1635
b'>>>>>>> MERGE-SOURCE\n')
1636
lines, conflicts = textmerge.merge_lines(self.reprocess)
1638
base_lines = textmerge.base_from_plan()
1641
return lines, base_lines
816
weave = self.this_revision_tree.get_weave(file_id)
817
this_revision_id = self.this_revision_tree.inventory[file_id].revision
818
other_revision_id = \
819
self.other_revision_tree.inventory[file_id].revision
820
wm = WeaveMerge(weave, this_revision_id, other_revision_id,
821
'<<<<<<< TREE\n', '>>>>>>> MERGE-SOURCE\n')
822
return wm.merge_lines(self.reprocess)
1643
def text_merge(self, trans_id, paths):
824
def text_merge(self, file_id, trans_id):
1644
825
"""Perform a (weave) text merge for a given file and file-id.
1645
826
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1646
827
and a conflict will be noted.
1648
base_path, other_path, this_path = paths
1649
lines, base_lines = self._merged_lines(this_path)
829
self._check_file(file_id)
830
lines, conflicts = self._merged_lines(file_id)
1650
831
lines = list(lines)
1651
# Note we're checking whether the OUTPUT is binary in this case,
832
# Note we're checking whether the OUTPUT is binary in this case,
1652
833
# because we don't want to get into weave merge guts.
1653
textfile.check_text_lines(lines)
834
check_text_lines(lines)
1654
835
self.tt.create_file(lines, trans_id)
1655
if base_lines is not None:
1657
837
self._raw_conflicts.append(('text conflict', trans_id))
1658
838
name = self.tt.final_name(trans_id)
1659
839
parent_id = self.tt.final_parent(trans_id)
1660
file_id = self.tt.final_file_id(trans_id)
1661
file_group = self._dump_conflicts(name, paths, parent_id, file_id,
1663
base_lines=base_lines)
840
file_group = self._dump_conflicts(name, parent_id, file_id,
1664
842
file_group.append(trans_id)
1667
class LCAMerger(WeaveMerger):
1669
requires_file_merge_plan = True
1671
def _generate_merge_plan(self, this_path, base):
1672
return self.this_tree.plan_file_lca_merge(this_path, self.other_tree,
1676
845
class Diff3Merger(Merge3Merger):
1677
846
"""Three-way merger using external diff3 for text merging"""
1679
requires_file_merge_plan = False
1681
def dump_file(self, temp_dir, name, tree, path):
1682
out_path = osutils.pathjoin(temp_dir, name)
1683
with open(out_path, "wb") as out_file:
1684
in_file = tree.get_file(path)
1685
for line in in_file:
1686
out_file.write(line)
847
def dump_file(self, temp_dir, name, tree, file_id):
848
out_path = pathjoin(temp_dir, name)
849
out_file = file(out_path, "wb")
850
in_file = tree.get_file(file_id)
1689
def text_merge(self, trans_id, paths):
855
def text_merge(self, file_id, trans_id):
1690
856
"""Perform a diff3 merge using a specified file-id and trans-id.
1691
857
If conflicts are encountered, .BASE, .THIS. and .OTHER conflict files
1692
858
will be dumped, and a will be conflict noted.
1695
base_path, other_path, this_path = paths
1696
temp_dir = osutils.mkdtemp(prefix="bzr-")
861
temp_dir = mkdtemp(prefix="bzr-")
1698
new_file = osutils.pathjoin(temp_dir, "new")
1699
this = self.dump_file(
1700
temp_dir, "this", self.this_tree, this_path)
1701
base = self.dump_file(
1702
temp_dir, "base", self.base_tree, base_path)
1703
other = self.dump_file(
1704
temp_dir, "other", self.other_tree, other_path)
1705
status = breezy.patch.diff3(new_file, this, base, other)
863
new_file = pathjoin(temp_dir, "new")
864
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
865
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
866
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
867
status = bzrlib.patch.diff3(new_file, this, base, other)
1706
868
if status not in (0, 1):
1707
raise errors.BzrError("Unhandled diff3 exit code")
1708
with open(new_file, 'rb') as f:
1709
self.tt.create_file(f, trans_id)
869
raise BzrError("Unhandled diff3 exit code")
870
self.tt.create_file(file(new_file, "rb"), trans_id)
1711
872
name = self.tt.final_name(trans_id)
1712
873
parent_id = self.tt.final_parent(trans_id)
1713
file_id = self.tt.final_file_id(trans_id)
1714
self._dump_conflicts(name, paths, parent_id, file_id)
1715
self._raw_conflicts.append(('text conflict', trans_id))
874
self._dump_conflicts(name, parent_id, file_id)
875
self._raw_conflicts.append(('text conflict', trans_id))
1717
osutils.rmtree(temp_dir)
1720
class PathNotInTree(errors.BzrError):
1722
_fmt = """Merge-into failed because %(tree)s does not contain %(path)s."""
1724
def __init__(self, path, tree):
1725
errors.BzrError.__init__(self, path=path, tree=tree)
1728
class MergeIntoMerger(Merger):
1729
"""Merger that understands other_tree will be merged into a subdir.
1731
This also changes the Merger api so that it uses real Branch, revision_id,
1732
and RevisonTree objects, rather than using revision specs.
1735
def __init__(self, this_tree, other_branch, other_tree, target_subdir,
1736
source_subpath, other_rev_id=None):
1737
"""Create a new MergeIntoMerger object.
1739
source_subpath in other_tree will be effectively copied to
1740
target_subdir in this_tree.
1742
:param this_tree: The tree that we will be merging into.
1743
:param other_branch: The Branch we will be merging from.
1744
:param other_tree: The RevisionTree object we want to merge.
1745
:param target_subdir: The relative path where we want to merge
1746
other_tree into this_tree
1747
:param source_subpath: The relative path specifying the subtree of
1748
other_tree to merge into this_tree.
1750
# It is assumed that we are merging a tree that is not in our current
1751
# ancestry, which means we are using the "EmptyTree" as our basis.
1752
null_ancestor_tree = this_tree.branch.repository.revision_tree(
1753
_mod_revision.NULL_REVISION)
1754
super(MergeIntoMerger, self).__init__(
1755
this_branch=this_tree.branch,
1756
this_tree=this_tree,
1757
other_tree=other_tree,
1758
base_tree=null_ancestor_tree,
1760
self._target_subdir = target_subdir
1761
self._source_subpath = source_subpath
1762
self.other_branch = other_branch
1763
if other_rev_id is None:
1764
other_rev_id = other_tree.get_revision_id()
1765
self.other_rev_id = self.other_basis = other_rev_id
1766
self.base_is_ancestor = True
1767
self.backup_files = True
1768
self.merge_type = Merge3Merger
1769
self.show_base = False
1770
self.reprocess = False
1771
self.interesting_files = None
1772
self.merge_type = _MergeTypeParameterizer(MergeIntoMergeType,
1773
target_subdir=self._target_subdir,
1774
source_subpath=self._source_subpath)
1775
if self._source_subpath != '':
1776
# If this isn't a partial merge make sure the revisions will be
1778
self._maybe_fetch(self.other_branch, self.this_branch,
1781
def set_pending(self):
1782
if self._source_subpath != '':
1784
Merger.set_pending(self)
1787
class _MergeTypeParameterizer(object):
1788
"""Wrap a merge-type class to provide extra parameters.
1790
This is hack used by MergeIntoMerger to pass some extra parameters to its
1791
merge_type. Merger.do_merge() sets up its own set of parameters to pass to
1792
the 'merge_type' member. It is difficult override do_merge without
1793
re-writing the whole thing, so instead we create a wrapper which will pass
1794
the extra parameters.
1797
def __init__(self, merge_type, **kwargs):
1798
self._extra_kwargs = kwargs
1799
self._merge_type = merge_type
1801
def __call__(self, *args, **kwargs):
1802
kwargs.update(self._extra_kwargs)
1803
return self._merge_type(*args, **kwargs)
1805
def __getattr__(self, name):
1806
return getattr(self._merge_type, name)
1809
class MergeIntoMergeType(Merge3Merger):
1810
"""Merger that incorporates a tree (or part of a tree) into another."""
1812
def __init__(self, *args, **kwargs):
1813
"""Initialize the merger object.
1815
:param args: See Merge3Merger.__init__'s args.
1816
:param kwargs: See Merge3Merger.__init__'s keyword args, except for
1817
source_subpath and target_subdir.
1818
:keyword source_subpath: The relative path specifying the subtree of
1819
other_tree to merge into this_tree.
1820
:keyword target_subdir: The relative path where we want to merge
1821
other_tree into this_tree
1823
# All of the interesting work happens during Merge3Merger.__init__(),
1824
# so we have have to hack in to get our extra parameters set.
1825
self._source_subpath = kwargs.pop('source_subpath')
1826
self._target_subdir = kwargs.pop('target_subdir')
1827
super(MergeIntoMergeType, self).__init__(*args, **kwargs)
1829
def _compute_transform(self):
1830
with ui.ui_factory.nested_progress_bar() as child_pb:
1831
entries = self._entries_to_incorporate()
1832
entries = list(entries)
1833
for num, (entry, parent_id, relpath) in enumerate(entries):
1834
child_pb.update(gettext('Preparing file merge'),
1836
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1837
path = osutils.pathjoin(self._source_subpath, relpath)
1838
trans_id = transform.new_by_entry(path, self.tt, entry,
1839
parent_trans_id, self.other_tree)
1840
self._finish_computing_transform()
1842
def _entries_to_incorporate(self):
1843
"""Yields pairs of (inventory_entry, new_parent)."""
1844
subdir_id = self.other_tree.path2id(self._source_subpath)
1845
if subdir_id is None:
1846
# XXX: The error would be clearer if it gave the URL of the source
1847
# branch, but we don't have a reference to that here.
1848
raise PathNotInTree(self._source_subpath, "Source tree")
1849
subdir = next(self.other_tree.iter_entries_by_dir(
1850
specific_files=[self._source_subpath]))[1]
1851
parent_in_target = osutils.dirname(self._target_subdir)
1852
target_id = self.this_tree.path2id(parent_in_target)
1853
if target_id is None:
1854
raise PathNotInTree(self._target_subdir, "Target tree")
1855
name_in_target = osutils.basename(self._target_subdir)
1856
merge_into_root = subdir.copy()
1857
merge_into_root.name = name_in_target
1859
self.this_tree.id2path(merge_into_root.file_id)
1860
except errors.NoSuchId:
1863
# Give the root a new file-id.
1864
# This can happen fairly easily if the directory we are
1865
# incorporating is the root, and both trees have 'TREE_ROOT' as
1866
# their root_id. Users will expect this to Just Work, so we
1867
# change the file-id here.
1868
# Non-root file-ids could potentially conflict too. That's really
1869
# an edge case, so we don't do anything special for those. We let
1870
# them cause conflicts.
1871
merge_into_root.file_id = generate_ids.gen_file_id(name_in_target)
1872
yield (merge_into_root, target_id, '')
1873
if subdir.kind != 'directory':
1874
# No children, so we are done.
1876
for path, entry in self.other_tree.root_inventory.iter_entries_by_dir(subdir_id):
1877
parent_id = entry.parent_id
1878
if parent_id == subdir.file_id:
1879
# The root's parent ID has changed, so make sure children of
1880
# the root refer to the new ID.
1881
parent_id = merge_into_root.file_id
1882
yield (entry, parent_id, path)
1885
880
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
1887
merge_type=Merge3Merger,
882
merge_type=Merge3Merger,
883
interesting_ids=None,
1890
886
other_rev_id=None,
1891
887
interesting_files=None,
1893
change_reporter=None):
1894
"""Primary interface for merging.
1896
Typical use is probably::
1898
merge_inner(branch, branch.get_revision_tree(other_revision),
1899
branch.get_revision_tree(base_revision))
890
"""Primary interface for merging.
892
typical use is probably
893
'merge_inner(branch, branch.get_revision_tree(other_revision),
894
branch.get_revision_tree(base_revision))'
1901
896
if this_tree is None:
1902
raise errors.BzrError("breezy.merge.merge_inner requires a this_tree "
1904
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1905
change_reporter=change_reporter)
897
warn("bzrlib.merge.merge_inner requires a this_tree parameter as of "
898
"bzrlib version 0.8.",
901
this_tree = this_branch.bzrdir.open_workingtree()
902
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1906
904
merger.backup_files = backup_files
1907
905
merger.merge_type = merge_type
906
merger.interesting_ids = interesting_ids
1908
907
merger.ignore_zero = ignore_zero
1909
merger.interesting_files = interesting_files
1910
merger.show_base = show_base
908
if interesting_files:
909
assert not interesting_ids, ('Only supply interesting_ids'
910
' or interesting_files')
911
merger._set_interesting_files(interesting_files)
912
merger.show_base = show_base
1911
913
merger.reprocess = reprocess
1912
914
merger.other_rev_id = other_rev_id
1913
915
merger.other_basis = other_rev_id
1914
get_revision_id = getattr(base_tree, 'get_revision_id', None)
1915
if get_revision_id is None:
1916
get_revision_id = base_tree.last_revision
1917
merger.cache_trees_with_revision_ids([other_tree, base_tree, this_tree])
1918
merger.set_base_revision(get_revision_id(), this_branch)
1919
916
return merger.do_merge()
1922
merge_type_registry = registry.Registry()
1923
merge_type_registry.register('diff3', Diff3Merger,
1924
"Merge using external diff3.")
1925
merge_type_registry.register('lca', LCAMerger,
1926
"LCA-newness merge.")
1927
merge_type_registry.register('merge3', Merge3Merger,
1928
"Native diff3-style merge.")
1929
merge_type_registry.register('weave', WeaveMerger,
1930
"Weave-based merge.")
1933
def get_merge_type_registry():
1934
"""Merge type registry was previously in breezy.option
1936
This method provides a backwards compatible way to retrieve it.
1938
return merge_type_registry
1941
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1942
def status_a(revision, text):
1943
if revision in ancestors_b:
1944
return 'killed-b', text
1946
return 'new-a', text
1948
def status_b(revision, text):
1949
if revision in ancestors_a:
1950
return 'killed-a', text
1952
return 'new-b', text
1954
plain_a = [t for (a, t) in annotated_a]
1955
plain_b = [t for (a, t) in annotated_b]
1956
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1957
blocks = matcher.get_matching_blocks()
1960
for ai, bi, l in blocks:
1961
# process all mismatched sections
1962
# (last mismatched section is handled because blocks always
1963
# includes a 0-length last block)
1964
for revision, text in annotated_a[a_cur:ai]:
1965
yield status_a(revision, text)
1966
for revision, text in annotated_b[b_cur:bi]:
1967
yield status_b(revision, text)
1968
# and now the matched section
1971
for text_a in plain_a[ai:a_cur]:
1972
yield "unchanged", text_a
1975
class _PlanMergeBase(object):
1977
def __init__(self, a_rev, b_rev, vf, key_prefix):
1980
:param a_rev: Revision-id of one revision to merge
1981
:param b_rev: Revision-id of the other revision to merge
1982
:param vf: A VersionedFiles containing both revisions
1983
:param key_prefix: A prefix for accessing keys in vf, typically
1989
self._last_lines = None
1990
self._last_lines_revision_id = None
1991
self._cached_matching_blocks = {}
1992
self._key_prefix = key_prefix
1993
self._precache_tip_lines()
1995
def _precache_tip_lines(self):
1996
lines = self.get_lines([self.a_rev, self.b_rev])
1997
self.lines_a = lines[self.a_rev]
1998
self.lines_b = lines[self.b_rev]
2000
def get_lines(self, revisions):
2001
"""Get lines for revisions from the backing VersionedFiles.
2003
:raises RevisionNotPresent: on absent texts.
2005
keys = [(self._key_prefix + (rev,)) for rev in revisions]
2007
for record in self.vf.get_record_stream(keys, 'unordered', True):
2008
if record.storage_kind == 'absent':
2009
raise errors.RevisionNotPresent(record.key, self.vf)
2010
result[record.key[-1]] = record.get_bytes_as('lines')
2013
def plan_merge(self):
2014
"""Generate a 'plan' for merging the two revisions.
2016
This involves comparing their texts and determining the cause of
2017
differences. If text A has a line and text B does not, then either the
2018
line was added to text A, or it was deleted from B. Once the causes
2019
are combined, they are written out in the format described in
2020
VersionedFile.plan_merge
2022
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
2023
unique_a, unique_b = self._unique_lines(blocks)
2024
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
2025
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
2026
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
2028
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
2031
for i, j, n in blocks:
2032
for a_index in range(last_i, i):
2033
if a_index in new_a:
2034
if a_index in killed_b:
2035
yield 'conflicted-a', self.lines_a[a_index]
2037
yield 'new-a', self.lines_a[a_index]
2039
yield 'killed-b', self.lines_a[a_index]
2040
for b_index in range(last_j, j):
2041
if b_index in new_b:
2042
if b_index in killed_a:
2043
yield 'conflicted-b', self.lines_b[b_index]
2045
yield 'new-b', self.lines_b[b_index]
2047
yield 'killed-a', self.lines_b[b_index]
2048
# handle common lines
2049
for a_index in range(i, i + n):
2050
yield 'unchanged', self.lines_a[a_index]
2054
def _get_matching_blocks(self, left_revision, right_revision):
2055
"""Return a description of which sections of two revisions match.
2057
See SequenceMatcher.get_matching_blocks
2059
cached = self._cached_matching_blocks.get((left_revision,
2061
if cached is not None:
2063
if self._last_lines_revision_id == left_revision:
2064
left_lines = self._last_lines
2065
right_lines = self.get_lines([right_revision])[right_revision]
2067
lines = self.get_lines([left_revision, right_revision])
2068
left_lines = lines[left_revision]
2069
right_lines = lines[right_revision]
2070
self._last_lines = right_lines
2071
self._last_lines_revision_id = right_revision
2072
matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
2074
return matcher.get_matching_blocks()
2076
def _unique_lines(self, matching_blocks):
2077
"""Analyse matching_blocks to determine which lines are unique
2079
:return: a tuple of (unique_left, unique_right), where the values are
2080
sets of line numbers of unique lines.
2086
for i, j, n in matching_blocks:
2087
unique_left.extend(range(last_i, i))
2088
unique_right.extend(range(last_j, j))
2091
return unique_left, unique_right
2094
def _subtract_plans(old_plan, new_plan):
2095
"""Remove changes from new_plan that came from old_plan.
2097
It is assumed that the difference between the old_plan and new_plan
2098
is their choice of 'b' text.
2100
All lines from new_plan that differ from old_plan are emitted
2101
verbatim. All lines from new_plan that match old_plan but are
2102
not about the 'b' revision are emitted verbatim.
2104
Lines that match and are about the 'b' revision are the lines we
2105
don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
2106
is skipped entirely.
2108
matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
2111
for i, j, n in matcher.get_matching_blocks():
2112
for jj in range(last_j, j):
2114
for jj in range(j, j + n):
2115
plan_line = new_plan[jj]
2116
if plan_line[0] == 'new-b':
2118
elif plan_line[0] == 'killed-b':
2119
yield 'unchanged', plan_line[1]
2125
class _PlanMerge(_PlanMergeBase):
2126
"""Plan an annotate merge using on-the-fly annotation"""
2128
def __init__(self, a_rev, b_rev, vf, key_prefix):
2129
super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
2130
self.a_key = self._key_prefix + (self.a_rev,)
2131
self.b_key = self._key_prefix + (self.b_rev,)
2132
self.graph = _mod_graph.Graph(self.vf)
2133
heads = self.graph.heads((self.a_key, self.b_key))
2135
# one side dominates, so we can just return its values, yay for
2137
# Ideally we would know that before we get this far
2138
self._head_key = heads.pop()
2139
if self._head_key == self.a_key:
2143
trace.mutter('found dominating revision for %s\n%s > %s', self.vf,
2144
self._head_key[-1], other)
2147
self._head_key = None
2150
def _precache_tip_lines(self):
2151
# Turn this into a no-op, because we will do this later
2154
def _find_recursive_lcas(self):
2155
"""Find all the ancestors back to a unique lca"""
2156
cur_ancestors = (self.a_key, self.b_key)
2157
# graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
2158
# rather than a key tuple. We will just map that directly to no common
2162
next_lcas = self.graph.find_lca(*cur_ancestors)
2163
# Map a plain NULL_REVISION to a simple no-ancestors
2164
if next_lcas == {_mod_revision.NULL_REVISION}:
2166
# Order the lca's based on when they were merged into the tip
2167
# While the actual merge portion of weave merge uses a set() of
2168
# active revisions, the order of insertion *does* effect the
2169
# implicit ordering of the texts.
2170
for rev_key in cur_ancestors:
2171
ordered_parents = tuple(self.graph.find_merge_order(rev_key,
2173
parent_map[rev_key] = ordered_parents
2174
if len(next_lcas) == 0:
2176
elif len(next_lcas) == 1:
2177
parent_map[list(next_lcas)[0]] = ()
2179
elif len(next_lcas) > 2:
2180
# More than 2 lca's, fall back to grabbing all nodes between
2181
# this and the unique lca.
2182
trace.mutter('More than 2 LCAs, falling back to all nodes for:'
2184
self.a_key, self.b_key, cur_ancestors)
2185
cur_lcas = next_lcas
2186
while len(cur_lcas) > 1:
2187
cur_lcas = self.graph.find_lca(*cur_lcas)
2188
if len(cur_lcas) == 0:
2189
# No common base to find, use the full ancestry
2192
unique_lca = list(cur_lcas)[0]
2193
if unique_lca == _mod_revision.NULL_REVISION:
2194
# find_lca will return a plain 'NULL_REVISION' rather
2195
# than a key tuple when there is no common ancestor, we
2196
# prefer to just use None, because it doesn't confuse
2197
# _get_interesting_texts()
2199
parent_map.update(self._find_unique_parents(next_lcas,
2202
cur_ancestors = next_lcas
2205
def _find_unique_parents(self, tip_keys, base_key):
2206
"""Find ancestors of tip that aren't ancestors of base.
2208
:param tip_keys: Nodes that are interesting
2209
:param base_key: Cull all ancestors of this node
2210
:return: The parent map for all revisions between tip_keys and
2211
base_key. base_key will be included. References to nodes outside of
2212
the ancestor set will also be removed.
2214
# TODO: this would be simpler if find_unique_ancestors took a list
2215
# instead of a single tip, internally it supports it, but it
2216
# isn't a "backwards compatible" api change.
2217
if base_key is None:
2218
parent_map = dict(self.graph.iter_ancestry(tip_keys))
2219
# We remove NULL_REVISION because it isn't a proper tuple key, and
2220
# thus confuses things like _get_interesting_texts, and our logic
2221
# to add the texts into the memory weave.
2222
if _mod_revision.NULL_REVISION in parent_map:
2223
parent_map.pop(_mod_revision.NULL_REVISION)
2226
for tip in tip_keys:
2228
self.graph.find_unique_ancestors(tip, [base_key]))
2229
parent_map = self.graph.get_parent_map(interesting)
2230
parent_map[base_key] = ()
2231
culled_parent_map, child_map, tails = self._remove_external_references(
2233
# Remove all the tails but base_key
2234
if base_key is not None:
2235
tails.remove(base_key)
2236
self._prune_tails(culled_parent_map, child_map, tails)
2237
# Now remove all the uninteresting 'linear' regions
2238
simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
2242
def _remove_external_references(parent_map):
2243
"""Remove references that go outside of the parent map.
2245
:param parent_map: Something returned from Graph.get_parent_map(keys)
2246
:return: (filtered_parent_map, child_map, tails)
2247
filtered_parent_map is parent_map without external references
2248
child_map is the {parent_key: [child_keys]} mapping
2249
tails is a list of nodes that do not have any parents in the map
2251
# TODO: The basic effect of this function seems more generic than
2252
# _PlanMerge. But the specific details of building a child_map,
2253
# and computing tails seems very specific to _PlanMerge.
2254
# Still, should this be in Graph land?
2255
filtered_parent_map = {}
2258
for key, parent_keys in viewitems(parent_map):
2259
culled_parent_keys = [p for p in parent_keys if p in parent_map]
2260
if not culled_parent_keys:
2262
for parent_key in culled_parent_keys:
2263
child_map.setdefault(parent_key, []).append(key)
2264
# TODO: Do we want to do this, it adds overhead for every node,
2265
# just to say that the node has no children
2266
child_map.setdefault(key, [])
2267
filtered_parent_map[key] = culled_parent_keys
2268
return filtered_parent_map, child_map, tails
2271
def _prune_tails(parent_map, child_map, tails_to_remove):
2272
"""Remove tails from the parent map.
2274
This will remove the supplied revisions until no more children have 0
2277
:param parent_map: A dict of {child: [parents]}, this dictionary will
2278
be modified in place.
2279
:param tails_to_remove: A list of tips that should be removed,
2280
this list will be consumed
2281
:param child_map: The reverse dict of parent_map ({parent: [children]})
2282
this dict will be modified
2283
:return: None, parent_map will be modified in place.
2285
while tails_to_remove:
2286
next = tails_to_remove.pop()
2287
parent_map.pop(next)
2288
children = child_map.pop(next)
2289
for child in children:
2290
child_parents = parent_map[child]
2291
child_parents.remove(next)
2292
if len(child_parents) == 0:
2293
tails_to_remove.append(child)
2295
def _get_interesting_texts(self, parent_map):
2296
"""Return a dict of texts we are interested in.
2298
Note that the input is in key tuples, but the output is in plain
2301
:param parent_map: The output from _find_recursive_lcas
2302
:return: A dict of {'revision_id':lines} as returned by
2303
_PlanMergeBase.get_lines()
2305
all_revision_keys = set(parent_map)
2306
all_revision_keys.add(self.a_key)
2307
all_revision_keys.add(self.b_key)
2309
# Everything else is in 'keys' but get_lines is in 'revision_ids'
2310
all_texts = self.get_lines([k[-1] for k in all_revision_keys])
2313
def _build_weave(self):
2314
from .bzr import weave
2315
self._weave = weave.Weave(weave_name='in_memory_weave',
2316
allow_reserved=True)
2317
parent_map = self._find_recursive_lcas()
2319
all_texts = self._get_interesting_texts(parent_map)
2321
# Note: Unfortunately, the order given by topo_sort will effect the
2322
# ordering resolution in the output. Specifically, if you add A then B,
2323
# then in the output text A lines will show up before B lines. And, of
2324
# course, topo_sort doesn't guarantee any real ordering.
2325
# So we use merge_sort, and add a fake node on the tip.
2326
# This ensures that left-hand parents will always be inserted into the
2327
# weave before right-hand parents.
2328
tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
2329
parent_map[tip_key] = (self.a_key, self.b_key)
2331
for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
2335
# for key in tsort.topo_sort(parent_map):
2336
parent_keys = parent_map[key]
2337
revision_id = key[-1]
2338
parent_ids = [k[-1] for k in parent_keys]
2339
self._weave.add_lines(revision_id, parent_ids,
2340
all_texts[revision_id])
2342
def plan_merge(self):
2343
"""Generate a 'plan' for merging the two revisions.
2345
This involves comparing their texts and determining the cause of
2346
differences. If text A has a line and text B does not, then either the
2347
line was added to text A, or it was deleted from B. Once the causes
2348
are combined, they are written out in the format described in
2349
VersionedFile.plan_merge
2351
if self._head_key is not None: # There was a single head
2352
if self._head_key == self.a_key:
2355
if self._head_key != self.b_key:
2356
raise AssertionError('There was an invalid head: %s != %s'
2357
% (self.b_key, self._head_key))
2359
head_rev = self._head_key[-1]
2360
lines = self.get_lines([head_rev])[head_rev]
2361
return ((plan, line) for line in lines)
2362
return self._weave.plan_merge(self.a_rev, self.b_rev)
2365
class _PlanLCAMerge(_PlanMergeBase):
2367
This merge algorithm differs from _PlanMerge in that:
2369
1. comparisons are done against LCAs only
2370
2. cases where a contested line is new versus one LCA but old versus
2371
another are marked as conflicts, by emitting the line as conflicted-a
2374
This is faster, and hopefully produces more useful output.
2377
def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
2378
_PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
2379
lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
2382
if lca == _mod_revision.NULL_REVISION:
2385
self.lcas.add(lca[-1])
2386
for lca in self.lcas:
2387
if _mod_revision.is_null(lca):
2390
lca_lines = self.get_lines([lca])[lca]
2391
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
2393
blocks = list(matcher.get_matching_blocks())
2394
self._cached_matching_blocks[(a_rev, lca)] = blocks
2395
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
2397
blocks = list(matcher.get_matching_blocks())
2398
self._cached_matching_blocks[(b_rev, lca)] = blocks
2400
def _determine_status(self, revision_id, unique_line_numbers):
2401
"""Determines the status unique lines versus all lcas.
2403
Basically, determines why the line is unique to this revision.
2405
A line may be determined new, killed, or both.
2407
If a line is determined new, that means it was not present in at least
2408
one LCA, and is not present in the other merge revision.
2410
If a line is determined killed, that means the line was present in
2413
If a line is killed and new, this indicates that the two merge
2414
revisions contain differing conflict resolutions.
2416
:param revision_id: The id of the revision in which the lines are
2418
:param unique_line_numbers: The line numbers of unique lines.
2419
:return: a tuple of (new_this, killed_other)
2423
unique_line_numbers = set(unique_line_numbers)
2424
for lca in self.lcas:
2425
blocks = self._get_matching_blocks(revision_id, lca)
2426
unique_vs_lca, _ignored = self._unique_lines(blocks)
2427
new.update(unique_line_numbers.intersection(unique_vs_lca))
2428
killed.update(unique_line_numbers.difference(unique_vs_lca))
919
merge_types = { "merge3": (Merge3Merger, "Native diff3-style merge"),
920
"diff3": (Diff3Merger, "Merge using external diff3"),
921
'weave': (WeaveMerger, "Weave-based merge")