# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from .lazy_import import lazy_import
lazy_import(globals(), """
    branch as _mod_branch,
    conflicts as _mod_conflicts,
    revision as _mod_revision,
from breezy.bzr import (
from breezy.i18n import gettext
# TODO: Report back as changes are merged in


def transform_tree(from_tree, to_tree, interesting_files=None):
    with from_tree.lock_tree_write():
        merge_inner(from_tree.branch, to_tree, from_tree,
                    ignore_zero=True, this_tree=from_tree,
                    interesting_files=interesting_files)
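# A minimal illustrative sketch of transform_tree() in use; ``wt`` and
# ``target_tree`` are hypothetical trees supplied by the caller:
#
#   transform_tree(wt, target_tree)
#   # ``wt`` now has the same content as ``target_tree``.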

class MergeHooks(hooks.Hooks):

    def __init__(self):
        hooks.Hooks.__init__(self, "breezy.merge", "Merger.hooks")
        self.add_hook('merge_file_content',
                      "Called with a breezy.merge.Merger object to create a per file "
                      "merge object when starting a merge. "
                      "Should return either None or a subclass of "
                      "``breezy.merge.AbstractPerFileMerger``. "
                      "Such objects will then be called per file "
                      "that needs to be merged (including when one "
                      "side has deleted the file and the other has changed it). "
                      "See the AbstractPerFileMerger API docs for details on how it is "
                      "used by merge.",
                      (2, 1))
        self.add_hook('pre_merge',
                      'Called before a merge. '
                      'Receives a Merger object as the single argument.',
                      (2, 5))
        self.add_hook('post_merge',
                      'Called after a merge. '
                      'Receives a Merger object as the single argument. '
                      'The return value is ignored.',
                      (2, 5))

class AbstractPerFileMerger(object):
    """PerFileMerger objects are used by plugins extending merge for breezy.

    See ``breezy.plugins.news_merge.news_merge`` for an example concrete class.

    :ivar merger: The Merge3Merger performing the merge.
    """

    def __init__(self, merger):
        """Create a PerFileMerger for use with merger."""
        self.merger = merger

    def merge_contents(self, merge_params):
        """Attempt to merge the contents of a single file.

        :param merge_params: A breezy.merge.MergeFileHookParams
        :return: A tuple of (status, chunks), where status is one of
            'not_applicable', 'success', 'conflicted', or 'delete'.  If status
            is 'success' or 'conflicted', then chunks should be an iterable of
            strings for the new file contents.
        """
        return ('not_applicable', None)
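    # Illustrative sketch of a (hypothetical) subclass that keeps the THIS
    # side of any conflicted ``.cfg`` file:
    #
    #   class KeepThisCfgMerger(AbstractPerFileMerger):
    #
    #       def merge_contents(self, merge_params):
    #           if (merge_params.is_file_merge() and
    #                   merge_params.this_path.endswith('.cfg')):
    #               return 'success', merge_params.this_lines
    #           return 'not_applicable', None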

class PerFileMerger(AbstractPerFileMerger):
    """Merge individual files when self.file_matches returns True.

    This class is intended to be subclassed.  The file_matches and
    merge_matching methods should be overridden with concrete implementations.
    """

    def file_matches(self, params):
        """Return True if merge_matching should be called on this file.

        Only called with merges of plain files with no clear winner.

        Subclasses must override this.
        """
        raise NotImplementedError(self.file_matches)

    def merge_contents(self, params):
        """Merge the contents of a single file."""
        # Check whether this custom merge logic should be used.
        if (
            # OTHER is a straight winner, rely on default merge.
            params.winner == 'other' or
            # THIS and OTHER aren't both files.
            not params.is_file_merge() or
            # The filename doesn't match.
            not self.file_matches(params)):
            return 'not_applicable', None
        return self.merge_matching(params)

    def merge_matching(self, params):
        """Merge the contents of a single file that has matched the criteria
        in PerFileMerger.merge_contents (is a conflict, is a file,
        self.file_matches is True).

        Subclasses must override this.
        """
        raise NotImplementedError(self.merge_matching)

class ConfigurableFileMerger(PerFileMerger):
    """Merge individual files when configured via a .conf file.

    This is a base class for concrete custom file merging logic. Concrete
    classes should implement ``merge_text``.

    See ``breezy.plugins.news_merge.news_merge`` for an example concrete class.

    :ivar affected_files: The configured file paths to merge.

    :cvar name_prefix: The prefix to use when looking up configuration
        details. <name_prefix>_merge_files describes the files targeted by the
        hook, for example.

    :cvar default_files: The default file paths to merge when no configuration
        is provided.
    """

    name_prefix = None
    default_files = None

    def __init__(self, merger):
        super(ConfigurableFileMerger, self).__init__(merger)
        self.affected_files = None
        self.default_files = self.__class__.default_files or []
        self.name_prefix = self.__class__.name_prefix
        if self.name_prefix is None:
            raise ValueError("name_prefix must be set.")

    def file_matches(self, params):
        """Check whether the file should call the merge hook.

        <name_prefix>_merge_files configuration variable is a list of files
        that should use the hook.
        """
        affected_files = self.affected_files
        if affected_files is None:
            config = self.merger.this_branch.get_config()
            # Until bzr provides a better policy for caching the config, we
            # just add the part we're interested in to the params to avoid
            # reading the config files repeatedly (breezy.conf, location.conf,
            # branch.conf).
            config_key = self.name_prefix + '_merge_files'
            affected_files = config.get_user_option_as_list(config_key)
            if affected_files is None:
                # If nothing was specified in the config, use the default.
                affected_files = self.default_files
            self.affected_files = affected_files
        if affected_files:
            filepath = params.this_path
            if filepath in affected_files:
                return True
        return False

    def merge_matching(self, params):
        return self.merge_text(params)

    def merge_text(self, params):
        """Merge the byte contents of a single file.

        This is called after checking that the merge should be performed in
        merge_contents, and it should behave as per
        ``breezy.merge.AbstractPerFileMerger.merge_contents``.
        """
        raise NotImplementedError(self.merge_text)
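    # Illustrative sketch of a concrete subclass and its hook registration,
    # modelled on ``breezy.plugins.news_merge`` (the class, helper and file
    # names here are hypothetical):
    #
    #   class ChangelogMerger(ConfigurableFileMerger):
    #       name_prefix = 'changelog'
    #       default_files = ['ChangeLog']
    #
    #       def merge_text(self, params):
    #           merged_lines = my_changelog_merge(params.base_lines,
    #                                             params.this_lines,
    #                                             params.other_lines)
    #           if merged_lines is None:
    #               return 'not_applicable', None
    #           return 'success', merged_lines
    #
    #   Merger.hooks.install_named_hook(
    #       'merge_file_content', ChangelogMerger, 'ChangeLog file merge')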

class MergeFileHookParams(object):
    """Object holding parameters passed to merge_file_content hooks.

    There are some fields hooks can access:

    :ivar base_path: Path in base tree
    :ivar other_path: Path in other tree
    :ivar this_path: Path in this tree
    :ivar trans_id: the transform ID for the merge of this file
    :ivar this_kind: kind of file in 'this' tree
    :ivar other_kind: kind of file in 'other' tree
    :ivar winner: one of 'this', 'other', 'conflict'
    """

    def __init__(self, merger, paths, trans_id, this_kind, other_kind,
                 winner):
        self._merger = merger
        self.paths = paths
        self.base_path, self.other_path, self.this_path = paths
        self.trans_id = trans_id
        self.this_kind = this_kind
        self.other_kind = other_kind
        self.winner = winner

    def is_file_merge(self):
        """True if this_kind and other_kind are both 'file'."""
        return self.this_kind == 'file' and self.other_kind == 'file'

    @decorators.cachedproperty
    def base_lines(self):
        """The lines of the 'base' version of the file."""
        return self._merger.get_lines(self._merger.base_tree, self.base_path)

    @decorators.cachedproperty
    def this_lines(self):
        """The lines of the 'this' version of the file."""
        return self._merger.get_lines(self._merger.this_tree, self.this_path)

    @decorators.cachedproperty
    def other_lines(self):
        """The lines of the 'other' version of the file."""
        return self._merger.get_lines(self._merger.other_tree, self.other_path)
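    # Illustrative sketch of how a hook's merge_contents() can consume these
    # parameters (``merge3`` comes from this module's lazy imports; a real
    # hook would also detect conflict markers and return 'conflicted'):
    #
    #   def merge_contents(self, params):
    #       if not params.is_file_merge():
    #           return 'not_applicable', None
    #       m3 = merge3.Merge3(params.base_lines, params.this_lines,
    #                          params.other_lines)
    #       return 'success', list(m3.merge_lines())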
263
class Merger(object):
267
def __init__(self, this_branch, other_tree=None, base_tree=None,
268
this_tree=None, change_reporter=None,
269
recurse='down', revision_graph=None):
270
object.__init__(self)
271
self.this_branch = this_branch
272
self.this_basis = _mod_revision.ensure_null(
273
this_branch.last_revision())
274
self.this_rev_id = None
275
self.this_tree = this_tree
276
self.this_revision_tree = None
277
self.this_basis_tree = None
278
self.other_tree = other_tree
279
self.other_branch = None
280
self.base_tree = base_tree
281
self.ignore_zero = False
282
self.backup_files = False
283
self.interesting_files = None
284
self.show_base = False
285
self.reprocess = False
287
self.recurse = recurse
288
self.change_reporter = change_reporter
289
self._cached_trees = {}
290
self._revision_graph = revision_graph
291
self._base_is_ancestor = None
292
self._base_is_other_ancestor = None
293
self._is_criss_cross = None
294
self._lca_trees = None
    def cache_trees_with_revision_ids(self, trees):
        """Cache any tree in trees if it has a revision_id."""
        for maybe_tree in trees:
            if maybe_tree is None:
                continue
            try:
                rev_id = maybe_tree.get_revision_id()
            except AttributeError:
                continue
            self._cached_trees[rev_id] = maybe_tree

    @property
    def revision_graph(self):
        if self._revision_graph is None:
            self._revision_graph = self.this_branch.repository.get_graph()
        return self._revision_graph
313
def _set_base_is_ancestor(self, value):
314
self._base_is_ancestor = value
316
def _get_base_is_ancestor(self):
317
if self._base_is_ancestor is None:
318
self._base_is_ancestor = self.revision_graph.is_ancestor(
319
self.base_rev_id, self.this_basis)
320
return self._base_is_ancestor
322
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
324
def _set_base_is_other_ancestor(self, value):
325
self._base_is_other_ancestor = value
327
def _get_base_is_other_ancestor(self):
328
if self._base_is_other_ancestor is None:
329
if self.other_basis is None:
331
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
332
self.base_rev_id, self.other_basis)
333
return self._base_is_other_ancestor
335
base_is_other_ancestor = property(_get_base_is_other_ancestor,
336
_set_base_is_other_ancestor)
    @staticmethod
    def from_uncommitted(tree, other_tree, base_tree=None):
        """Return a Merger for uncommitted changes in other_tree.

        :param tree: The tree to merge into
        :param other_tree: The tree to get uncommitted changes from
        :param base_tree: The basis to use for the merge.  If unspecified,
            other_tree.basis_tree() will be used.
        """
        if base_tree is None:
            base_tree = other_tree.basis_tree()
        merger = Merger(tree.branch, other_tree, base_tree, tree)
        merger.base_rev_id = merger.base_tree.get_revision_id()
        merger.other_rev_id = None
        merger.other_basis = merger.base_rev_id
        return merger
    @classmethod
    def from_mergeable(klass, tree, mergeable):
        """Return a Merger for a bundle or merge directive.

        :param tree: The tree to merge changes into
        :param mergeable: A merge directive or bundle
        """
        mergeable.install_revisions(tree.branch.repository)
        base_revision_id, other_revision_id, verified =\
            mergeable.get_merge_request(tree.branch.repository)
        revision_graph = tree.branch.repository.get_graph()
        if base_revision_id is not None:
            if (base_revision_id != _mod_revision.NULL_REVISION and
                revision_graph.is_ancestor(
                    base_revision_id, tree.branch.last_revision())):
                base_revision_id = None
            else:
                trace.warning('Performing cherrypick')
        merger = klass.from_revision_ids(tree, other_revision_id,
                                         base_revision_id, revision_graph=revision_graph)
        return merger, verified
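    # Illustrative sketch of the typical calling pattern for these factory
    # methods (``wt`` is a hypothetical WorkingTree and ``b'other-rev-id'`` a
    # revision present in its repository):
    #
    #   merger = Merger.from_revision_ids(wt, b'other-rev-id')
    #   merger.merge_type = Merge3Merger
    #   conflict_count = merger.do_merge()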
378
def from_revision_ids(tree, other, base=None, other_branch=None,
379
base_branch=None, revision_graph=None,
381
"""Return a Merger for revision-ids.
383
:param tree: The tree to merge changes into
384
:param other: The revision-id to use as OTHER
385
:param base: The revision-id to use as BASE. If not specified, will
387
:param other_branch: A branch containing the other revision-id. If
388
not supplied, tree.branch is used.
389
:param base_branch: A branch containing the base revision-id. If
390
not supplied, other_branch or tree.branch will be used.
391
:param revision_graph: If you have a revision_graph precomputed, pass
392
it in, otherwise it will be created for you.
393
:param tree_branch: The branch associated with tree. If not supplied,
394
tree.branch will be used.
396
if tree_branch is None:
397
tree_branch = tree.branch
398
merger = Merger(tree_branch, this_tree=tree,
399
revision_graph=revision_graph)
400
if other_branch is None:
401
other_branch = tree.branch
402
merger.set_other_revision(other, other_branch)
406
if base_branch is None:
407
base_branch = other_branch
408
merger.set_base_revision(base, base_branch)
411
def revision_tree(self, revision_id, branch=None):
412
if revision_id not in self._cached_trees:
414
branch = self.this_branch
416
tree = self.this_tree.revision_tree(revision_id)
417
except errors.NoSuchRevisionInTree:
418
tree = branch.repository.revision_tree(revision_id)
419
self._cached_trees[revision_id] = tree
420
return self._cached_trees[revision_id]
422
def _get_tree(self, treespec, possible_transports=None):
423
location, revno = treespec
425
tree = workingtree.WorkingTree.open_containing(location)[0]
426
return tree.branch, tree
427
branch = _mod_branch.Branch.open_containing(
428
location, possible_transports)[0]
430
revision_id = branch.last_revision()
432
revision_id = branch.get_rev_id(revno)
433
revision_id = _mod_revision.ensure_null(revision_id)
434
return branch, self.revision_tree(revision_id, branch)
436
def set_interesting_files(self, file_list):
437
self.interesting_files = file_list
439
def set_pending(self):
440
if (not self.base_is_ancestor or not self.base_is_other_ancestor
441
or self.other_rev_id is None):
445
def _add_parent(self):
446
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
447
new_parent_trees = []
448
with contextlib.ExitStack() as stack:
449
for revision_id in new_parents:
451
tree = self.revision_tree(revision_id)
452
except errors.NoSuchRevision:
455
stack.enter_context(tree.lock_read())
456
new_parent_trees.append((revision_id, tree))
457
self.this_tree.set_parent_trees(new_parent_trees, allow_leftmost_as_ghost=True)
459
def set_other(self, other_revision, possible_transports=None):
460
"""Set the revision and tree to merge from.
462
This sets the other_tree, other_rev_id, other_basis attributes.
464
:param other_revision: The [path, revision] list to merge from.
466
self.other_branch, self.other_tree = self._get_tree(other_revision,
468
if other_revision[1] == -1:
469
self.other_rev_id = _mod_revision.ensure_null(
470
self.other_branch.last_revision())
471
if _mod_revision.is_null(self.other_rev_id):
472
raise errors.NoCommits(self.other_branch)
473
self.other_basis = self.other_rev_id
474
elif other_revision[1] is not None:
475
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
476
self.other_basis = self.other_rev_id
478
self.other_rev_id = None
479
self.other_basis = self.other_branch.last_revision()
480
if self.other_basis is None:
481
raise errors.NoCommits(self.other_branch)
482
if self.other_rev_id is not None:
483
self._cached_trees[self.other_rev_id] = self.other_tree
484
self._maybe_fetch(self.other_branch,
485
self.this_branch, self.other_basis)
487
def set_other_revision(self, revision_id, other_branch):
488
"""Set 'other' based on a branch and revision id
490
:param revision_id: The revision to use for a tree
491
:param other_branch: The branch containing this tree
493
self.other_rev_id = revision_id
494
self.other_branch = other_branch
495
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
496
self.other_tree = self.revision_tree(revision_id)
497
self.other_basis = revision_id
499
def set_base_revision(self, revision_id, branch):
500
"""Set 'base' based on a branch and revision id
502
:param revision_id: The revision to use for a tree
503
:param branch: The branch containing this tree
505
self.base_rev_id = revision_id
506
self.base_branch = branch
507
self._maybe_fetch(branch, self.this_branch, revision_id)
508
self.base_tree = self.revision_tree(revision_id)
510
def _maybe_fetch(self, source, target, revision_id):
511
if not source.repository.has_same_location(target.repository):
512
target.fetch(source, revision_id)
515
revisions = [_mod_revision.ensure_null(self.this_basis),
516
_mod_revision.ensure_null(self.other_basis)]
517
if _mod_revision.NULL_REVISION in revisions:
518
self.base_rev_id = _mod_revision.NULL_REVISION
519
self.base_tree = self.revision_tree(self.base_rev_id)
520
self._is_criss_cross = False
522
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
523
self._is_criss_cross = False
525
self.base_rev_id = _mod_revision.NULL_REVISION
527
self.base_rev_id = list(lcas)[0]
528
else: # len(lcas) > 1
529
self._is_criss_cross = True
531
# find_unique_lca can only handle 2 nodes, so we have to
532
# start back at the beginning. It is a shame to traverse
533
# the graph again, but better than re-implementing
535
self.base_rev_id = self.revision_graph.find_unique_lca(
536
revisions[0], revisions[1])
538
self.base_rev_id = self.revision_graph.find_unique_lca(
540
sorted_lca_keys = self.revision_graph.find_merge_order(
542
if self.base_rev_id == _mod_revision.NULL_REVISION:
543
self.base_rev_id = sorted_lca_keys[0]
545
if self.base_rev_id == _mod_revision.NULL_REVISION:
546
raise errors.UnrelatedBranches()
547
if self._is_criss_cross:
548
trace.warning('Warning: criss-cross merge encountered. See bzr'
549
' help criss-cross.')
550
trace.mutter('Criss-cross lcas: %r' % lcas)
551
if self.base_rev_id in lcas:
552
trace.mutter('Unable to find unique lca. '
553
'Fallback %r as best option.'
555
interesting_revision_ids = set(lcas)
556
interesting_revision_ids.add(self.base_rev_id)
557
interesting_trees = dict((t.get_revision_id(), t)
558
for t in self.this_branch.repository.revision_trees(
559
interesting_revision_ids))
560
self._cached_trees.update(interesting_trees)
561
if self.base_rev_id in lcas:
562
self.base_tree = interesting_trees[self.base_rev_id]
564
self.base_tree = interesting_trees.pop(self.base_rev_id)
565
self._lca_trees = [interesting_trees[key]
566
for key in sorted_lca_keys]
568
self.base_tree = self.revision_tree(self.base_rev_id)
569
self.base_is_ancestor = True
570
self.base_is_other_ancestor = True
571
trace.mutter('Base revid: %r' % self.base_rev_id)
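        # Illustrative sketch of the base selection above (hypothetical
        # revision graph):
        #
        #        A              lcas = graph.find_lca(THIS_BASIS, OTHER_BASIS)
        #       / \
        #      B   C            len(lcas) == 1 -> that revision becomes BASE
        #      |\ /|            len(lcas) > 1  -> criss-cross: a unique LCA
        #      | X |              computed from the graph is used as BASE and
        #      |/ \|              self._lca_trees is populated for LCA-aware
        #   THIS   OTHER          merge types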
573
def set_base(self, base_revision):
574
"""Set the base revision to use for the merge.
576
:param base_revision: A 2-list containing a path and revision number.
578
trace.mutter("doing merge() with no base_revision specified")
579
if base_revision == [None, None]:
582
base_branch, self.base_tree = self._get_tree(base_revision)
583
if base_revision[1] == -1:
584
self.base_rev_id = base_branch.last_revision()
585
elif base_revision[1] is None:
586
self.base_rev_id = _mod_revision.NULL_REVISION
588
self.base_rev_id = _mod_revision.ensure_null(
589
base_branch.get_rev_id(base_revision[1]))
590
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
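        # Illustrative examples of accepted ``base_revision`` values
        # (mirroring the branches above):
        #
        #   [None, None] -> no base given; one is chosen from the revision
        #                   graph automatically
        #   ['.', -1]    -> use the last revision of the branch at '.'
        #   ['.', None]  -> use the null revision as BASE
        #   ['.', 42]    -> use revision number 42 of the branch at '.'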
592
def make_merger(self):
593
kwargs = {'working_tree': self.this_tree, 'this_tree': self.this_tree,
594
'other_tree': self.other_tree,
595
'interesting_files': self.interesting_files,
596
'this_branch': self.this_branch,
597
'other_branch': self.other_branch,
599
if self.merge_type.requires_base:
600
kwargs['base_tree'] = self.base_tree
601
if self.merge_type.supports_reprocess:
602
kwargs['reprocess'] = self.reprocess
604
raise errors.BzrError(
605
"Conflict reduction is not supported for merge"
606
" type %s." % self.merge_type)
607
if self.merge_type.supports_show_base:
608
kwargs['show_base'] = self.show_base
610
raise errors.BzrError("Showing base is not supported for this"
611
" merge type. %s" % self.merge_type)
612
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
613
and not self.base_is_other_ancestor):
614
raise errors.CannotReverseCherrypick()
615
if self.merge_type.supports_cherrypick:
616
kwargs['cherrypick'] = (not self.base_is_ancestor or
617
not self.base_is_other_ancestor)
618
if self._is_criss_cross and getattr(self.merge_type,
619
'supports_lca_trees', False):
620
kwargs['lca_trees'] = self._lca_trees
621
return self.merge_type(change_reporter=self.change_reporter,
624
def _do_merge_to(self):
625
merge = self.make_merger()
626
if self.other_branch is not None:
627
self.other_branch.update_references(self.this_branch)
628
for hook in Merger.hooks['pre_merge']:
631
for hook in Merger.hooks['post_merge']:
633
if self.recurse == 'down':
634
for relpath in self.this_tree.iter_references():
635
sub_tree = self.this_tree.get_nested_tree(relpath)
636
other_revision = self.other_tree.get_reference_revision(
638
if other_revision == sub_tree.last_revision():
640
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
641
sub_merge.merge_type = self.merge_type
642
other_branch = self.other_tree.reference_parent(relpath)
643
sub_merge.set_other_revision(other_revision, other_branch)
644
base_tree_path = _mod_tree.find_previous_path(
645
self.this_tree, self.base_tree, relpath)
646
base_revision = self.base_tree.get_reference_revision(
648
sub_merge.base_tree = \
649
sub_tree.branch.repository.revision_tree(base_revision)
650
sub_merge.base_rev_id = base_revision
655
with contextlib.ExitStack() as stack:
656
stack.enter_context(self.this_tree.lock_tree_write())
657
if self.base_tree is not None:
658
stack.enter_context(self.base_tree.lock_read())
659
if self.other_tree is not None:
660
stack.enter_context(self.other_tree.lock_read())
661
merge = self._do_merge_to()
662
if len(merge.cooked_conflicts) == 0:
663
if not self.ignore_zero and not trace.is_quiet():
664
trace.note(gettext("All changes applied successfully."))
666
trace.note(gettext("%d conflicts encountered.")
667
% len(merge.cooked_conflicts))
669
return len(merge.cooked_conflicts)
672
class _InventoryNoneEntry(object):
673
"""This represents an inventory entry which *isn't there*.
675
It simplifies the merging logic if we always have an InventoryEntry, even
676
if it isn't actually present
683
symlink_target = None
686
def is_unmodified(self, other):
690
_none_entry = _InventoryNoneEntry()
693
class Merge3Merger(object):
694
"""Three-way merger that uses the merge3 text merger"""
696
supports_reprocess = True
697
supports_show_base = True
698
history_based = False
699
supports_cherrypick = True
700
supports_reverse_cherrypick = True
701
winner_idx = {"this": 2, "other": 1, "conflict": 1}
702
supports_lca_trees = True
703
requires_file_merge_plan = False
705
def __init__(self, working_tree, this_tree, base_tree, other_tree,
706
reprocess=False, show_base=False,
707
change_reporter=None, interesting_files=None, do_merge=True,
708
cherrypick=False, lca_trees=None, this_branch=None,
710
"""Initialize the merger object and perform the merge.
712
:param working_tree: The working tree to apply the merge to
713
:param this_tree: The local tree in the merge operation
714
:param base_tree: The common tree in the merge operation
715
:param other_tree: The other tree to merge changes from
716
:param this_branch: The branch associated with this_tree. Defaults to
717
this_tree.branch if not supplied.
718
:param other_branch: The branch associated with other_tree, if any.
719
:param: reprocess If True, perform conflict-reduction processing.
720
:param show_base: If True, show the base revision in text conflicts.
721
(incompatible with reprocess)
722
:param change_reporter: An object that should report changes made
723
:param interesting_files: The tree-relative paths of files that should
724
participate in the merge. If these paths refer to directories,
725
the contents of those directories will also be included. If not
726
specified, all files may participate in the
728
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
729
if the ancestry was found to include a criss-cross merge.
730
Otherwise should be None.
732
object.__init__(self)
733
if this_branch is None:
734
this_branch = this_tree.branch
735
self.interesting_files = interesting_files
736
self.working_tree = working_tree
737
self.this_tree = this_tree
738
self.base_tree = base_tree
739
self.other_tree = other_tree
740
self.this_branch = this_branch
741
self.other_branch = other_branch
742
self._raw_conflicts = []
743
self.cooked_conflicts = []
744
self.reprocess = reprocess
745
self.show_base = show_base
746
self._lca_trees = lca_trees
747
# Uncommenting this will change the default algorithm to always use
748
# _entries_lca. This can be useful for running the test suite and
749
# making sure we haven't missed any corner cases.
750
# if lca_trees is None:
751
# self._lca_trees = [self.base_tree]
752
self.change_reporter = change_reporter
753
self.cherrypick = cherrypick
758
with contextlib.ExitStack() as stack:
759
stack.enter_context(self.working_tree.lock_tree_write())
760
stack.enter_context(self.this_tree.lock_read())
761
stack.enter_context(self.base_tree.lock_read())
762
stack.enter_context(self.other_tree.lock_read())
763
self.tt = self.working_tree.get_transform()
764
stack.enter_context(self.tt)
765
self._compute_transform()
766
results = self.tt.apply(no_conflicts=True)
767
self.write_modified(results)
769
self.working_tree.add_conflicts(self.cooked_conflicts)
770
except errors.UnsupportedOperation:
773
def make_preview_transform(self):
774
with self.base_tree.lock_read(), self.other_tree.lock_read():
775
self.tt = transform.TransformPreview(self.working_tree)
776
self._compute_transform()
779
def _compute_transform(self):
780
if self._lca_trees is None:
781
entries = self._entries3()
782
resolver = self._three_way
784
entries = self._entries_lca()
785
resolver = self._lca_multi_way
786
# Prepare merge hooks
787
factories = Merger.hooks['merge_file_content']
788
# One hook for each registered one plus our default merger
789
hooks = [factory(self) for factory in factories] + [self]
790
self.active_hooks = [hook for hook in hooks if hook is not None]
791
with ui.ui_factory.nested_progress_bar() as child_pb:
792
for num, (file_id, changed, paths3, parents3, names3,
793
executable3) in enumerate(entries):
794
trans_id = self.tt.trans_id_file_id(file_id)
796
# Try merging each entry
797
child_pb.update(gettext('Preparing file merge'),
799
self._merge_names(trans_id, file_id, paths3, parents3,
800
names3, resolver=resolver)
802
file_status = self._do_merge_contents(paths3, trans_id, file_id)
804
file_status = 'unmodified'
805
self._merge_executable(paths3, trans_id, executable3,
806
file_status, resolver=resolver)
807
self.tt.fixup_new_roots()
808
self._finish_computing_transform()
810
def _finish_computing_transform(self):
811
"""Finalize the transform and report the changes.
813
This is the second half of _compute_transform.
815
with ui.ui_factory.nested_progress_bar() as child_pb:
816
fs_conflicts = transform.resolve_conflicts(
818
lambda t, c: transform.conflict_pass(t, c, self.other_tree))
819
if self.change_reporter is not None:
820
from breezy import delta
821
delta.report_changes(
822
self.tt.iter_changes(), self.change_reporter)
823
self.cook_conflicts(fs_conflicts)
824
for conflict in self.cooked_conflicts:
825
trace.warning('%s', conflict.describe())
828
"""Gather data about files modified between three trees.
830
Return a list of tuples of file_id, changed, parents3, names3,
831
executable3. changed is a boolean indicating whether the file contents
832
or kind were changed. parents3 is a tuple of parent ids for base,
833
other and this. names3 is a tuple of names for base, other and this.
834
executable3 is a tuple of execute-bit values for base, other and this.
837
iterator = self.other_tree.iter_changes(self.base_tree,
838
specific_files=self.interesting_files,
839
extra_trees=[self.this_tree])
840
this_interesting_files = self.this_tree.find_related_paths_across_trees(
841
self.interesting_files, trees=[self.other_tree])
842
this_entries = dict(self.this_tree.iter_entries_by_dir(
843
specific_files=this_interesting_files))
844
for change in iterator:
845
if change.path[0] is not None:
846
this_path = _mod_tree.find_previous_path(
847
self.base_tree, self.this_tree, change.path[0])
849
this_path = _mod_tree.find_previous_path(
850
self.other_tree, self.this_tree, change.path[1])
851
this_entry = this_entries.get(this_path)
852
if this_entry is not None:
853
this_name = this_entry.name
854
this_parent = this_entry.parent_id
855
this_executable = this_entry.executable
859
this_executable = None
860
parents3 = change.parent_id + (this_parent,)
861
names3 = change.name + (this_name,)
862
paths3 = change.path + (this_path, )
863
executable3 = change.executable + (this_executable,)
865
(change.file_id, change.changed_content, paths3,
866
parents3, names3, executable3))
869
def _entries_lca(self):
870
"""Gather data about files modified between multiple trees.
872
This compares OTHER versus all LCA trees, and for interesting entries,
873
it then compares with THIS and BASE.
875
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
877
:return: [(file_id, changed, paths, parents, names, executable)], where:
879
* file_id: Simple file_id of the entry
880
* changed: Boolean, True if the kind or contents changed else False
881
* paths: ((base, [path, in, lcas]), path_other, path_this)
882
* parents: ((base, [parent_id, in, lcas]), parent_id_other,
884
* names: ((base, [name, in, lcas]), name_in_other, name_in_this)
885
* executable: ((base, [exec, in, lcas]), exec_in_other,
888
if self.interesting_files is not None:
889
lookup_trees = [self.this_tree, self.base_tree]
890
lookup_trees.extend(self._lca_trees)
891
# I think we should include the lca trees as well
892
interesting_files = self.other_tree.find_related_paths_across_trees(
893
self.interesting_files, lookup_trees)
895
interesting_files = None
897
from .multiwalker import MultiWalker
898
walker = MultiWalker(self.other_tree, self._lca_trees)
900
for other_path, file_id, other_ie, lca_values in walker.iter_all():
901
# Is this modified at all from any of the other trees?
903
other_ie = _none_entry
905
if interesting_files is not None and other_path not in interesting_files:
908
# If other_revision is found in any of the lcas, that means this
909
# node is uninteresting. This is because when merging, if there are
910
# multiple heads(), we have to create a new node. So if we didn't,
911
# we know that the ancestry is linear, and that OTHER did not
913
# See doc/developers/lca_merge_resolution.txt for details
914
# We can't use this shortcut when other_revision is None,
915
# because it may be None because things are WorkingTrees, and
916
# not because it is *actually* None.
917
is_unmodified = False
918
for lca_path, ie in lca_values:
919
if ie is not None and other_ie.is_unmodified(ie):
927
for lca_path, lca_ie in lca_values:
929
lca_entries.append(_none_entry)
930
lca_paths.append(None)
932
lca_entries.append(lca_ie)
933
lca_paths.append(lca_path)
936
base_path = self.base_tree.id2path(file_id)
937
except errors.NoSuchId:
939
base_ie = _none_entry
941
base_ie = next(self.base_tree.iter_entries_by_dir(specific_files=[base_path]))[1]
944
this_path = self.this_tree.id2path(file_id)
945
except errors.NoSuchId:
946
this_ie = _none_entry
949
this_ie = next(self.this_tree.iter_entries_by_dir(specific_files=[this_path]))[1]
955
for lca_ie in lca_entries:
956
lca_kinds.append(lca_ie.kind)
957
lca_parent_ids.append(lca_ie.parent_id)
958
lca_names.append(lca_ie.name)
959
lca_executable.append(lca_ie.executable)
961
kind_winner = self._lca_multi_way(
962
(base_ie.kind, lca_kinds),
963
other_ie.kind, this_ie.kind)
964
parent_id_winner = self._lca_multi_way(
965
(base_ie.parent_id, lca_parent_ids),
966
other_ie.parent_id, this_ie.parent_id)
967
name_winner = self._lca_multi_way(
968
(base_ie.name, lca_names),
969
other_ie.name, this_ie.name)
971
content_changed = True
972
if kind_winner == 'this':
973
# No kind change in OTHER, see if there are *any* changes
974
if other_ie.kind == 'directory':
975
if parent_id_winner == 'this' and name_winner == 'this':
976
# No change for this directory in OTHER, skip
978
content_changed = False
979
elif other_ie.kind is None or other_ie.kind == 'file':
980
def get_sha1(tree, path):
984
return tree.get_file_sha1(path)
985
except errors.NoSuchFile:
987
base_sha1 = get_sha1(self.base_tree, base_path)
988
lca_sha1s = [get_sha1(tree, lca_path)
990
in zip(self._lca_trees, lca_paths)]
991
this_sha1 = get_sha1(self.this_tree, this_path)
992
other_sha1 = get_sha1(self.other_tree, other_path)
993
sha1_winner = self._lca_multi_way(
994
(base_sha1, lca_sha1s), other_sha1, this_sha1,
995
allow_overriding_lca=False)
996
exec_winner = self._lca_multi_way(
997
(base_ie.executable, lca_executable),
998
other_ie.executable, this_ie.executable)
999
if (parent_id_winner == 'this' and name_winner == 'this'
1000
and sha1_winner == 'this' and exec_winner == 'this'):
1001
# No kind, parent, name, exec, or content change for
1002
# OTHER, so this node is not considered interesting
1004
if sha1_winner == 'this':
1005
content_changed = False
1006
elif other_ie.kind == 'symlink':
1007
def get_target(ie, tree, path):
1008
if ie.kind != 'symlink':
1010
return tree.get_symlink_target(path)
1011
base_target = get_target(base_ie, self.base_tree, base_path)
1012
lca_targets = [get_target(ie, tree, lca_path) for ie, tree, lca_path
1013
in zip(lca_entries, self._lca_trees, lca_paths)]
1014
this_target = get_target(
1015
this_ie, self.this_tree, this_path)
1016
other_target = get_target(
1017
other_ie, self.other_tree, other_path)
1018
target_winner = self._lca_multi_way(
1019
(base_target, lca_targets),
1020
other_target, this_target)
1021
if (parent_id_winner == 'this' and name_winner == 'this'
1022
and target_winner == 'this'):
1023
# No kind, parent, name, or symlink target change
1026
if target_winner == 'this':
1027
content_changed = False
1028
elif other_ie.kind == 'tree-reference':
1029
# The 'changed' information seems to be handled at a higher
1030
# level. At least, _entries3 returns False for content
1031
# changed, even when at a new revision_id.
1032
content_changed = False
1033
if (parent_id_winner == 'this' and name_winner == 'this'):
1034
# Nothing interesting
1037
raise AssertionError('unhandled kind: %s' % other_ie.kind)
1039
# If we have gotten this far, that means something has changed
1040
result.append((file_id, content_changed,
1041
((base_path, lca_paths),
1042
other_path, this_path),
1043
((base_ie.parent_id, lca_parent_ids),
1044
other_ie.parent_id, this_ie.parent_id),
1045
((base_ie.name, lca_names),
1046
other_ie.name, this_ie.name),
1047
((base_ie.executable, lca_executable),
1048
other_ie.executable, this_ie.executable)
1052
def write_modified(self, results):
1053
if not self.working_tree.supports_merge_modified():
1055
modified_hashes = {}
1056
for path in results.modified_paths:
1057
wt_relpath = self.working_tree.relpath(path)
1058
if not self.working_tree.is_versioned(wt_relpath):
1060
hash = self.working_tree.get_file_sha1(wt_relpath)
1063
modified_hashes[wt_relpath] = hash
1064
self.working_tree.set_merge_modified(modified_hashes)
1068
"""Determine the parent for a file_id (used as a key method)"""
1071
return entry.parent_id
1075
"""Determine the name for a file_id (used as a key method)"""
1081
def contents_sha1(tree, path):
1082
"""Determine the sha1 of the file contents (used as a key method)."""
1084
return tree.get_file_sha1(path)
1085
except errors.NoSuchFile:
1089
def executable(tree, path):
1090
"""Determine the executability of a file-id (used as a key method)."""
1092
if tree.kind(path) != "file":
1094
except errors.NoSuchFile:
1096
return tree.is_executable(path)
1099
def kind(tree, path):
1100
"""Determine the kind of a file-id (used as a key method)."""
1102
return tree.kind(path)
1103
except errors.NoSuchFile:
1107
def _three_way(base, other, this):
1109
# if 'base == other', either they all agree, or only 'this' has
1112
elif this not in (base, other):
1113
# 'this' is neither 'base' nor 'other', so both sides changed
1116
# "Ambiguous clean merge" -- both sides have made the same change.
1119
# this == base: only other has changed.
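        # Illustrative outcomes (scalar values, following the rules above;
        # arguments are _three_way(base, other, this)):
        #
        #   _three_way('a', 'a', 'b') -> 'this'      only THIS changed
        #   _three_way('a', 'b', 'a') -> 'other'     only OTHER changed
        #   _three_way('a', 'b', 'b') -> 'this'      both made the same change
        #   _three_way('a', 'b', 'c') -> 'conflict'  both changed, differently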
1123
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
1124
"""Consider LCAs when determining whether a change has occurred.
1126
If LCAS are all identical, this is the same as a _three_way comparison.
1128
:param bases: value in (BASE, [LCAS])
1129
:param other: value in OTHER
1130
:param this: value in THIS
1131
:param allow_overriding_lca: If there is more than one unique lca
1132
value, allow OTHER to override THIS if it has a new value, and
1133
THIS only has an lca value, or vice versa. This is appropriate for
1134
truly scalar values, not as much for non-scalars.
1135
:return: 'this', 'other', or 'conflict' depending on whether an entry
1138
# See doc/developers/lca_tree_merging.txt for details about this
1141
# Either Ambiguously clean, or nothing was actually changed. We
1144
base_val, lca_vals = bases
1145
# Remove 'base_val' from the lca_vals, because it is not interesting
1146
filtered_lca_vals = [lca_val for lca_val in lca_vals
1147
if lca_val != base_val]
1148
if len(filtered_lca_vals) == 0:
1149
return Merge3Merger._three_way(base_val, other, this)
1151
unique_lca_vals = set(filtered_lca_vals)
1152
if len(unique_lca_vals) == 1:
1153
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
1155
if allow_overriding_lca:
1156
if other in unique_lca_vals:
1157
if this in unique_lca_vals:
1158
# Each side picked a different lca, conflict
1161
# This has a value which supersedes both lca values, and
1162
# other only has an lca value
1164
elif this in unique_lca_vals:
1165
# OTHER has a value which supersedes both lca values, and this
1166
# only has an lca value
1169
        # At this point, the LCA values disagree, and THIS and OTHER disagree
        # as well.
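        # Illustrative outcomes (matching the logic above; arguments are
        # _lca_multi_way(bases, other, this)):
        #
        #   (('a', ['a', 'a']), 'b', 'a') -> 'other'
        #       all LCAs agree with BASE, so this reduces to _three_way
        #   (('a', ['b', 'c']), 'b', 'c') -> 'conflict'
        #       each side kept a different LCA value
        #   (('a', ['b', 'c']), 'd', 'b') -> 'other'
        #       OTHER supersedes both LCA values; THIS only has an LCA value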
1172
def _merge_names(self, trans_id, file_id, paths, parents, names, resolver):
1173
"""Perform a merge on file names and parents"""
1174
base_name, other_name, this_name = names
1175
base_parent, other_parent, this_parent = parents
1176
unused_base_path, other_path, this_path = paths
1178
name_winner = resolver(*names)
1180
parent_id_winner = resolver(*parents)
1181
if this_name is None:
1182
if name_winner == "this":
1183
name_winner = "other"
1184
if parent_id_winner == "this":
1185
parent_id_winner = "other"
1186
if name_winner == "this" and parent_id_winner == "this":
1188
if name_winner == 'conflict' or parent_id_winner == 'conflict':
1189
# Creating helpers (.OTHER or .THIS) here cause problems down the
1190
# road if a ContentConflict needs to be created so we should not do
1192
self._raw_conflicts.append(('path conflict', trans_id, file_id,
1193
this_parent, this_name,
1194
other_parent, other_name))
1195
if other_path is None:
1196
# it doesn't matter whether the result was 'other' or
1197
# 'conflict'-- if it has no file id, we leave it alone.
1199
parent_id = parents[self.winner_idx[parent_id_winner]]
1200
name = names[self.winner_idx[name_winner]]
1201
if parent_id is not None or name is not None:
1202
# if we get here, name_winner and parent_winner are set to safe
1204
if parent_id is None and name is not None:
1205
# if parent_id is None and name is non-None, current file is
1207
if names[self.winner_idx[parent_id_winner]] != '':
1208
raise AssertionError(
1209
'File looks like a root, but named %s' %
1210
names[self.winner_idx[parent_id_winner]])
1211
parent_trans_id = transform.ROOT_PARENT
1213
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1214
self.tt.adjust_path(name, parent_trans_id, trans_id)
1216
def _do_merge_contents(self, paths, trans_id, file_id):
1217
"""Performs a merge on file_id contents."""
1218
def contents_pair(tree, path):
1222
kind = tree.kind(path)
1223
except errors.NoSuchFile:
1226
contents = tree.get_file_sha1(path)
1227
elif kind == "symlink":
1228
contents = tree.get_symlink_target(path)
1231
return kind, contents
1233
base_path, other_path, this_path = paths
1234
# See SPOT run. run, SPOT, run.
1235
# So we're not QUITE repeating ourselves; we do tricky things with
1237
other_pair = contents_pair(self.other_tree, other_path)
1238
this_pair = contents_pair(self.this_tree, this_path)
1240
(base_path, lca_paths) = base_path
1241
base_pair = contents_pair(self.base_tree, base_path)
1242
lca_pairs = [contents_pair(tree, path)
1243
for tree, path in zip(self._lca_trees, lca_paths)]
1244
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1245
this_pair, allow_overriding_lca=False)
1247
base_pair = contents_pair(self.base_tree, base_path)
1248
if base_pair == other_pair:
1251
# We delayed evaluating this_pair as long as we can to avoid
1252
# unnecessary sha1 calculation
1253
this_pair = contents_pair(self.this_tree, this_path)
1254
winner = self._three_way(base_pair, other_pair, this_pair)
1255
if winner == 'this':
1256
# No interesting changes introduced by OTHER
1258
# We have a hypothetical conflict, but if we have files, then we
1259
# can try to merge the content
1260
params = MergeFileHookParams(
1261
self, (base_path, other_path, this_path), trans_id, this_pair[0],
1262
other_pair[0], winner)
1263
hooks = self.active_hooks
1264
hook_status = 'not_applicable'
1266
hook_status, lines = hook.merge_contents(params)
1267
if hook_status != 'not_applicable':
1268
# Don't try any more hooks, this one applies.
1270
# If the merge ends up replacing the content of the file, we get rid of
1271
# it at the end of this method (this variable is used to track the
1272
# exceptions to this rule).
1275
if hook_status == 'not_applicable':
1276
# No merge hook was able to resolve the situation. Two cases exist:
1277
# a content conflict or a duplicate one.
1279
name = self.tt.final_name(trans_id)
1280
parent_id = self.tt.final_parent(trans_id)
1281
inhibit_content_conflict = False
1282
if params.this_kind is None: # file_id is not in THIS
1283
# Is the name used for a different file_id ?
1284
if self.this_tree.is_versioned(other_path):
1285
# Two entries for the same path
1287
# versioning the merged file will trigger a duplicate
1289
self.tt.version_file(file_id, trans_id)
1290
transform.create_from_tree(
1291
self.tt, trans_id, self.other_tree,
1293
filter_tree_path=self._get_filter_tree_path(other_path))
1294
inhibit_content_conflict = True
1295
elif params.other_kind is None: # file_id is not in OTHER
1296
# Is the name used for a different file_id ?
1297
if self.other_tree.is_versioned(this_path):
1298
# Two entries for the same path again, but here, the other
1299
# entry will also be merged. We simply inhibit the
1300
# 'content' conflict creation because we know OTHER will
1301
# create (or has already created depending on ordering) an
1302
# entry at the same path. This will trigger a 'duplicate'
1305
inhibit_content_conflict = True
1306
if not inhibit_content_conflict:
1307
if params.this_kind is not None:
1308
self.tt.unversion_file(trans_id)
1309
# This is a contents conflict, because none of the available
1310
# functions could merge it.
1311
file_group = self._dump_conflicts(
1312
name, (base_path, other_path, this_path), parent_id,
1313
file_id, set_version=True)
1314
self._raw_conflicts.append(('contents conflict', file_group))
1315
elif hook_status == 'success':
1316
self.tt.create_file(lines, trans_id)
1317
elif hook_status == 'conflicted':
1318
# XXX: perhaps the hook should be able to provide
1319
# the BASE/THIS/OTHER files?
1320
self.tt.create_file(lines, trans_id)
1321
self._raw_conflicts.append(('text conflict', trans_id))
1322
name = self.tt.final_name(trans_id)
1323
parent_id = self.tt.final_parent(trans_id)
1324
self._dump_conflicts(
1325
name, (base_path, other_path, this_path), parent_id, file_id)
1326
elif hook_status == 'delete':
1327
self.tt.unversion_file(trans_id)
1329
elif hook_status == 'done':
1330
# The hook function did whatever it needs to do directly, no
1331
# further action needed here.
1334
raise AssertionError('unknown hook_status: %r' % (hook_status,))
1335
if not this_path and result == "modified":
1336
self.tt.version_file(file_id, trans_id)
1338
# The merge has been performed and produced a new content, so the
1339
# old contents should not be retained.
1340
self.tt.delete_contents(trans_id)
1343
def _default_other_winner_merge(self, merge_hook_params):
1344
"""Replace this contents with other."""
1345
trans_id = merge_hook_params.trans_id
1346
if merge_hook_params.other_path is not None:
1347
# OTHER changed the file
1348
transform.create_from_tree(
1349
self.tt, trans_id, self.other_tree,
1350
merge_hook_params.other_path,
1351
filter_tree_path=self._get_filter_tree_path(merge_hook_params.other_path))
1353
elif merge_hook_params.this_path is not None:
1354
# OTHER deleted the file
1355
return 'delete', None
1357
raise AssertionError(
1358
'winner is OTHER, but file %r not in THIS or OTHER tree'
1359
% (merge_hook_params.base_path,))
1361
def merge_contents(self, merge_hook_params):
1362
"""Fallback merge logic after user installed hooks."""
1363
# This function is used in merge hooks as the fallback instance.
1364
# Perhaps making this function and the functions it calls be a
1365
# a separate class would be better.
1366
if merge_hook_params.winner == 'other':
1367
# OTHER is a straight winner, so replace this contents with other
1368
return self._default_other_winner_merge(merge_hook_params)
1369
elif merge_hook_params.is_file_merge():
1370
# THIS and OTHER are both files, so text merge. Either
1371
# BASE is a file, or both converted to files, so at least we
1372
# have agreement that output should be a file.
1374
self.text_merge(merge_hook_params.trans_id,
1375
merge_hook_params.paths)
1376
except errors.BinaryFile:
1377
return 'not_applicable', None
1380
return 'not_applicable', None
1382
def get_lines(self, tree, path):
1383
"""Return the lines in a file, or an empty list."""
1387
kind = tree.kind(path)
1388
except errors.NoSuchFile:
1393
return tree.get_file_lines(path)
1395
def text_merge(self, trans_id, paths):
1396
"""Perform a three-way text merge on a file"""
1397
# it's possible that we got here with base as a different type.
1398
# if so, we just want two-way text conflicts.
1399
base_path, other_path, this_path = paths
1400
base_lines = self.get_lines(self.base_tree, base_path)
1401
other_lines = self.get_lines(self.other_tree, other_path)
1402
this_lines = self.get_lines(self.this_tree, this_path)
1403
m3 = merge3.Merge3(base_lines, this_lines, other_lines,
1404
is_cherrypick=self.cherrypick)
1405
start_marker = b"!START OF MERGE CONFLICT!" + b"I HOPE THIS IS UNIQUE"
1406
if self.show_base is True:
1407
base_marker = b'|' * 7
1411
def iter_merge3(retval):
1412
retval["text_conflicts"] = False
1413
for line in m3.merge_lines(name_a=b"TREE",
1414
name_b=b"MERGE-SOURCE",
1415
name_base=b"BASE-REVISION",
1416
start_marker=start_marker,
1417
base_marker=base_marker,
1418
reprocess=self.reprocess):
1419
if line.startswith(start_marker):
1420
retval["text_conflicts"] = True
1421
yield line.replace(start_marker, b'<' * 7)
1425
merge3_iterator = iter_merge3(retval)
1426
self.tt.create_file(merge3_iterator, trans_id)
1427
if retval["text_conflicts"] is True:
1428
self._raw_conflicts.append(('text conflict', trans_id))
1429
name = self.tt.final_name(trans_id)
1430
parent_id = self.tt.final_parent(trans_id)
1431
file_id = self.tt.final_file_id(trans_id)
1432
file_group = self._dump_conflicts(name, paths, parent_id, file_id,
1433
this_lines, base_lines,
1435
file_group.append(trans_id)
1437
def _get_filter_tree_path(self, path):
1438
if self.this_tree.supports_content_filtering():
1439
# We get the path from the working tree if it exists.
1440
# That fails though when OTHER is adding a file, so
1441
# we fall back to the other tree to find the path if
1442
# it doesn't exist locally.
1443
filter_path = _mod_tree.find_previous_path(
1444
self.other_tree, self.working_tree, path)
1445
if filter_path is None:
1448
# Skip the lookup for older formats
1451
def _dump_conflicts(self, name, paths, parent_id, file_id, this_lines=None,
1452
base_lines=None, other_lines=None, set_version=False,
1454
"""Emit conflict files.
1455
If this_lines, base_lines, or other_lines are omitted, they will be
1456
determined automatically. If set_version is true, the .OTHER, .THIS
1457
or .BASE (in that order) will be created as versioned files.
1459
base_path, other_path, this_path = paths
1460
data = [('OTHER', self.other_tree, other_path, other_lines),
1461
('THIS', self.this_tree, this_path, this_lines)]
1463
data.append(('BASE', self.base_tree, base_path, base_lines))
1465
# We need to use the actual path in the working tree of the file here,
1466
# ignoring the conflict suffixes
1468
if wt.supports_content_filtering():
1470
filter_tree_path = wt.id2path(file_id)
1471
except errors.NoSuchId:
1472
# file has been deleted
1473
filter_tree_path = None
1475
# Skip the id2path lookup for older formats
1476
filter_tree_path = None
1480
for suffix, tree, path, lines in data:
1481
if path is not None:
1482
trans_id = self._conflict_file(
1483
name, parent_id, path, tree, suffix, lines,
1485
file_group.append(trans_id)
1486
if set_version and not versioned:
1487
self.tt.version_file(file_id, trans_id)
1491
def _conflict_file(self, name, parent_id, path, tree, suffix,
1492
lines=None, filter_tree_path=None):
1493
"""Emit a single conflict file."""
1494
name = name + '.' + suffix
1495
trans_id = self.tt.create_path(name, parent_id)
1496
transform.create_from_tree(
1497
self.tt, trans_id, tree, path,
1499
filter_tree_path=filter_tree_path)
1502
def _merge_executable(self, paths, trans_id, executable, file_status,
1504
"""Perform a merge on the execute bit."""
1505
base_executable, other_executable, this_executable = executable
1506
base_path, other_path, this_path = paths
1507
if file_status == "deleted":
1509
winner = resolver(*executable)
1510
if winner == "conflict":
1511
# There must be a None in here, if we have a conflict, but we
1512
# need executability since file status was not deleted.
1513
if other_path is None:
1517
if winner == 'this' and file_status != "modified":
1519
if self.tt.final_kind(trans_id) != "file":
1521
if winner == "this":
1522
executability = this_executable
1524
if other_path is not None:
1525
executability = other_executable
1526
elif this_path is not None:
1527
executability = this_executable
1528
elif base_path is not None:
1529
executability = base_executable
1530
if executability is not None:
1531
self.tt.set_executability(executability, trans_id)
1533
def cook_conflicts(self, fs_conflicts):
1534
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1535
content_conflict_file_ids = set()
1536
cooked_conflicts = transform.cook_conflicts(fs_conflicts, self.tt)
1537
fp = transform.FinalPaths(self.tt)
1538
for conflict in self._raw_conflicts:
1539
conflict_type = conflict[0]
1540
if conflict_type == 'path conflict':
1542
this_parent, this_name,
1543
other_parent, other_name) = conflict[1:]
1544
if this_parent is None or this_name is None:
1545
this_path = '<deleted>'
1547
parent_path = fp.get_path(
1548
self.tt.trans_id_file_id(this_parent))
1549
this_path = osutils.pathjoin(parent_path, this_name)
1550
if other_parent is None or other_name is None:
1551
other_path = '<deleted>'
1553
if other_parent == self.other_tree.path2id(''):
1554
# The tree transform doesn't know about the other root,
1555
# so we special case here to avoid a NoFinalPath
1559
parent_path = fp.get_path(
1560
self.tt.trans_id_file_id(other_parent))
1561
other_path = osutils.pathjoin(parent_path, other_name)
1562
c = _mod_conflicts.Conflict.factory(
1563
'path conflict', path=this_path,
1564
conflict_path=other_path,
1566
elif conflict_type == 'contents conflict':
1567
for trans_id in conflict[1]:
1568
file_id = self.tt.final_file_id(trans_id)
1569
if file_id is not None:
1570
# Ok we found the relevant file-id
1572
path = fp.get_path(trans_id)
1573
for suffix in ('.BASE', '.THIS', '.OTHER'):
1574
if path.endswith(suffix):
1575
# Here is the raw path
1576
path = path[:-len(suffix)]
1578
c = _mod_conflicts.Conflict.factory(conflict_type,
1579
path=path, file_id=file_id)
1580
content_conflict_file_ids.add(file_id)
1581
elif conflict_type == 'text conflict':
1582
trans_id = conflict[1]
1583
path = fp.get_path(trans_id)
1584
file_id = self.tt.final_file_id(trans_id)
1585
c = _mod_conflicts.Conflict.factory(conflict_type,
1586
path=path, file_id=file_id)
1588
raise AssertionError('bad conflict type: %r' % (conflict,))
1589
cooked_conflicts.append(c)
1591
self.cooked_conflicts = []
1592
# We want to get rid of path conflicts when a corresponding contents
1593
# conflict exists. This can occur when one branch deletes a file while
1594
# the other renames *and* modifies it. In this case, the content
1595
# conflict is enough.
1596
for c in cooked_conflicts:
1597
if (c.typestring == 'path conflict'
1598
and c.file_id in content_conflict_file_ids):
1600
self.cooked_conflicts.append(c)
1601
self.cooked_conflicts.sort(key=_mod_conflicts.Conflict.sort_key)
1604
class WeaveMerger(Merge3Merger):
1605
"""Three-way tree merger, text weave merger."""
1606
supports_reprocess = True
1607
supports_show_base = False
1608
supports_reverse_cherrypick = False
1609
history_based = True
1610
requires_file_merge_plan = True
1612
def _generate_merge_plan(self, this_path, base):
1613
return self.this_tree.plan_file_merge(this_path, self.other_tree,
1616
def _merged_lines(self, this_path):
1617
"""Generate the merged lines.
1618
There is no distinction between lines that are meant to contain <<<<<<<
1622
base = self.base_tree
1625
plan = self._generate_merge_plan(this_path, base)
1626
if 'merge' in debug.debug_flags:
1628
trans_id = self.tt.trans_id_file_id(file_id)
1629
name = self.tt.final_name(trans_id) + '.plan'
1630
contents = (b'%11s|%s' % l for l in plan)
1631
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1632
textmerge = versionedfile.PlanWeaveMerge(plan, b'<<<<<<< TREE\n',
1633
b'>>>>>>> MERGE-SOURCE\n')
1634
lines, conflicts = textmerge.merge_lines(self.reprocess)
1636
base_lines = textmerge.base_from_plan()
1639
return lines, base_lines
1641
def text_merge(self, trans_id, paths):
1642
"""Perform a (weave) text merge for a given file and file-id.
1643
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1644
and a conflict will be noted.
1646
base_path, other_path, this_path = paths
1647
lines, base_lines = self._merged_lines(this_path)
1649
# Note we're checking whether the OUTPUT is binary in this case,
1650
# because we don't want to get into weave merge guts.
1651
textfile.check_text_lines(lines)
1652
self.tt.create_file(lines, trans_id)
1653
if base_lines is not None:
1655
self._raw_conflicts.append(('text conflict', trans_id))
1656
name = self.tt.final_name(trans_id)
1657
parent_id = self.tt.final_parent(trans_id)
1658
file_id = self.tt.final_file_id(trans_id)
1659
file_group = self._dump_conflicts(name, paths, parent_id, file_id,
1661
base_lines=base_lines)
1662
file_group.append(trans_id)
1665
class LCAMerger(WeaveMerger):
1667
requires_file_merge_plan = True
1669
def _generate_merge_plan(self, this_path, base):
1670
return self.this_tree.plan_file_lca_merge(this_path, self.other_tree,
1674
class Diff3Merger(Merge3Merger):
1675
"""Three-way merger using external diff3 for text merging"""
1677
requires_file_merge_plan = False
1679
def dump_file(self, temp_dir, name, tree, path):
1680
out_path = osutils.pathjoin(temp_dir, name)
1681
with open(out_path, "wb") as out_file:
1682
in_file = tree.get_file(path)
1683
for line in in_file:
1684
out_file.write(line)
1687
    def text_merge(self, trans_id, paths):
        """Perform a diff3 merge using the given trans-id and paths.

        If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
1693
base_path, other_path, this_path = paths
1694
temp_dir = osutils.mkdtemp(prefix="bzr-")
1696
new_file = osutils.pathjoin(temp_dir, "new")
1697
this = self.dump_file(
1698
temp_dir, "this", self.this_tree, this_path)
1699
base = self.dump_file(
1700
temp_dir, "base", self.base_tree, base_path)
1701
other = self.dump_file(
1702
temp_dir, "other", self.other_tree, other_path)
1703
status = breezy.patch.diff3(new_file, this, base, other)
1704
if status not in (0, 1):
1705
raise errors.BzrError("Unhandled diff3 exit code")
1706
with open(new_file, 'rb') as f:
1707
self.tt.create_file(f, trans_id)
1709
name = self.tt.final_name(trans_id)
1710
parent_id = self.tt.final_parent(trans_id)
1711
file_id = self.tt.final_file_id(trans_id)
1712
self._dump_conflicts(name, paths, parent_id, file_id)
1713
self._raw_conflicts.append(('text conflict', trans_id))
1715
osutils.rmtree(temp_dir)
1718
class PathNotInTree(errors.BzrError):

    _fmt = """Merge-into failed because %(tree)s does not contain %(path)s."""

    def __init__(self, path, tree):
        errors.BzrError.__init__(self, path=path, tree=tree)

class MergeIntoMerger(Merger):
    """Merger that understands other_tree will be merged into a subdir.

    This also changes the Merger api so that it uses real Branch, revision_id,
    and RevisionTree objects, rather than using revision specs.
    """

    def __init__(self, this_tree, other_branch, other_tree, target_subdir,
                 source_subpath, other_rev_id=None):
        """Create a new MergeIntoMerger object.

        source_subpath in other_tree will be effectively copied to
        target_subdir in this_tree.

        :param this_tree: The tree that we will be merging into.
        :param other_branch: The Branch we will be merging from.
        :param other_tree: The RevisionTree object we want to merge.
        :param target_subdir: The relative path where we want to merge
            other_tree into this_tree
        :param source_subpath: The relative path specifying the subtree of
            other_tree to merge into this_tree.
        """
        # It is assumed that we are merging a tree that is not in our current
        # ancestry, which means we are using the "EmptyTree" as our basis.
        null_ancestor_tree = this_tree.branch.repository.revision_tree(
            _mod_revision.NULL_REVISION)
        super(MergeIntoMerger, self).__init__(
            this_branch=this_tree.branch,
            this_tree=this_tree,
            other_tree=other_tree,
            base_tree=null_ancestor_tree,
            )
        self._target_subdir = target_subdir
        self._source_subpath = source_subpath
        self.other_branch = other_branch
        if other_rev_id is None:
            other_rev_id = other_tree.get_revision_id()
        self.other_rev_id = self.other_basis = other_rev_id
        self.base_is_ancestor = True
        self.backup_files = True
        self.merge_type = Merge3Merger
        self.show_base = False
        self.reprocess = False
        self.interesting_files = None
        self.merge_type = _MergeTypeParameterizer(MergeIntoMergeType,
            target_subdir=self._target_subdir,
            source_subpath=self._source_subpath)
        if self._source_subpath != '':
            # If this isn't a partial merge make sure the revisions will be
            # present.
            self._maybe_fetch(self.other_branch, self.this_branch,
                              self.other_basis)

    def set_pending(self):
        if self._source_subpath != '':
            return
        Merger.set_pending(self)

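# Illustrative usage sketch (not part of the original module; ``wt``,
# ``other_branch`` and ``rev_tree`` are hypothetical objects): merging a
# subtree of another branch into a subdirectory of the working tree would
# look roughly like::
#
#     merger = MergeIntoMerger(this_tree=wt, other_branch=other_branch,
#                              other_tree=rev_tree,  # a RevisionTree
#                              target_subdir='lib/vendored',
#                              source_subpath='subdir')
#     conflicts = merger.do_merge()
#     merger.set_pending()
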
class _MergeTypeParameterizer(object):
    """Wrap a merge-type class to provide extra parameters.

    This is a hack used by MergeIntoMerger to pass some extra parameters to
    its merge_type.  Merger.do_merge() sets up its own set of parameters to
    pass to the 'merge_type' member.  It is difficult to override do_merge
    without re-writing the whole thing, so instead we create a wrapper which
    will pass the extra parameters.
    """

    def __init__(self, merge_type, **kwargs):
        self._extra_kwargs = kwargs
        self._merge_type = merge_type

    def __call__(self, *args, **kwargs):
        kwargs.update(self._extra_kwargs)
        return self._merge_type(*args, **kwargs)

    def __getattr__(self, name):
        return getattr(self._merge_type, name)

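# Illustrative sketch (not part of the original module): the wrapper behaves
# like the wrapped merge-type class, but injects its stored keyword arguments
# whenever the "class" is called.  With hypothetical arguments::
#
#     wrapped = _MergeTypeParameterizer(MergeIntoMergeType,
#                                       target_subdir='dir',
#                                       source_subpath='')
#     # wrapped(*args, **kwargs) is then equivalent to
#     # MergeIntoMergeType(*args, target_subdir='dir', source_subpath='',
#     #                    **kwargs)
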
class MergeIntoMergeType(Merge3Merger):
    """Merger that incorporates a tree (or part of a tree) into another."""

    def __init__(self, *args, **kwargs):
        """Initialize the merger object.

        :param args: See Merge3Merger.__init__'s args.
        :param kwargs: See Merge3Merger.__init__'s keyword args, except for
            source_subpath and target_subdir.
        :keyword source_subpath: The relative path specifying the subtree of
            other_tree to merge into this_tree.
        :keyword target_subdir: The relative path where we want to merge
            other_tree into this_tree
        """
        # All of the interesting work happens during Merge3Merger.__init__(),
        # so we have to hack in to get our extra parameters set.
        self._source_subpath = kwargs.pop('source_subpath')
        self._target_subdir = kwargs.pop('target_subdir')
        super(MergeIntoMergeType, self).__init__(*args, **kwargs)
    def _compute_transform(self):
        with ui.ui_factory.nested_progress_bar() as child_pb:
            entries = self._entries_to_incorporate()
            entries = list(entries)
            for num, (entry, parent_id, relpath) in enumerate(entries):
                child_pb.update(gettext('Preparing file merge'),
                                num, len(entries))
                parent_trans_id = self.tt.trans_id_file_id(parent_id)
                path = osutils.pathjoin(self._source_subpath, relpath)
                trans_id = transform.new_by_entry(path, self.tt, entry,
                                                  parent_trans_id, self.other_tree)
        self._finish_computing_transform()
    def _entries_to_incorporate(self):
        """Yields pairs of (inventory_entry, new_parent)."""
        subdir_id = self.other_tree.path2id(self._source_subpath)
        if subdir_id is None:
            # XXX: The error would be clearer if it gave the URL of the source
            # branch, but we don't have a reference to that here.
            raise PathNotInTree(self._source_subpath, "Source tree")
        subdir = next(self.other_tree.iter_entries_by_dir(
            specific_files=[self._source_subpath]))[1]
        parent_in_target = osutils.dirname(self._target_subdir)
        target_id = self.this_tree.path2id(parent_in_target)
        if target_id is None:
            raise PathNotInTree(self._target_subdir, "Target tree")
        name_in_target = osutils.basename(self._target_subdir)
        merge_into_root = subdir.copy()
        merge_into_root.name = name_in_target
        try:
            self.this_tree.id2path(merge_into_root.file_id)
        except errors.NoSuchId:
            pass
        else:
            # Give the root a new file-id.
            # This can happen fairly easily if the directory we are
            # incorporating is the root, and both trees have 'TREE_ROOT' as
            # their root_id. Users will expect this to Just Work, so we
            # change the file-id here.
            # Non-root file-ids could potentially conflict too. That's really
            # an edge case, so we don't do anything special for those. We let
            # them cause conflicts.
            merge_into_root.file_id = generate_ids.gen_file_id(name_in_target)
        yield (merge_into_root, target_id, '')
        if subdir.kind != 'directory':
            # No children, so we are done.
            return
        for path, entry in self.other_tree.root_inventory.iter_entries_by_dir(subdir_id):
            parent_id = entry.parent_id
            if parent_id == subdir.file_id:
                # The root's parent ID has changed, so make sure children of
                # the root refer to the new ID.
                parent_id = merge_into_root.file_id
            yield (entry, parent_id, path)

def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                change_reporter=None):
    """Primary interface for merging.

    Typical use is probably::

        merge_inner(branch, branch.get_revision_tree(other_revision),
                    branch.get_revision_tree(base_revision))
    """
    if this_tree is None:
        raise errors.BzrError("breezy.merge.merge_inner requires a this_tree "
                              "parameter")
    merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
                    change_reporter=change_reporter)
    merger.backup_files = backup_files
    merger.merge_type = merge_type
    merger.ignore_zero = ignore_zero
    merger.interesting_files = interesting_files
    merger.show_base = show_base
    merger.reprocess = reprocess
    merger.other_rev_id = other_rev_id
    merger.other_basis = other_rev_id
    get_revision_id = getattr(base_tree, 'get_revision_id', None)
    if get_revision_id is None:
        get_revision_id = base_tree.last_revision
    merger.cache_trees_with_revision_ids([other_tree, base_tree, this_tree])
    merger.set_base_revision(get_revision_id(), this_branch)
    return merger.do_merge()

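# Illustrative call (not part of the original module; ``wt`` is a hypothetical
# working tree): merge_inner needs an explicit this_tree, so a minimal
# invocation looks roughly like::
#
#     merge_inner(wt.branch,
#                 wt.branch.repository.revision_tree(other_rev_id),
#                 wt.branch.repository.revision_tree(base_rev_id),
#                 this_tree=wt)
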
merge_type_registry = registry.Registry()
merge_type_registry.register('diff3', Diff3Merger,
                             "Merge using external diff3.")
merge_type_registry.register('lca', LCAMerger,
                             "LCA-newness merge.")
merge_type_registry.register('merge3', Merge3Merger,
                             "Native diff3-style merge.")
merge_type_registry.register('weave', WeaveMerger,
                             "Weave-based merge.")

def get_merge_type_registry():
    """Merge type registry was previously in breezy.option

    This method provides a backwards compatible way to retrieve it.
    """
    return merge_type_registry

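# Illustrative lookup (not part of the original module): callers resolve a
# merge type by its registered name, e.g.::
#
#     merger_class = get_merge_type_registry().get('weave')
#     # merger_class is WeaveMerger
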
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
        else:
            return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
        else:
            return 'new-b', text

    plain_a = [t for (a, t) in annotated_a]
    plain_b = [t for (a, t) in annotated_b]
    a_cur = 0
    b_cur = 0
    matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
    blocks = matcher.get_matching_blocks()

    for ai, bi, l in blocks:
        # process all mismatched sections
        # (last mismatched section is handled because blocks always
        # includes a 0-length last block)
        for revision, text in annotated_a[a_cur:ai]:
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a

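# Illustrative sketch (not part of the original module): each input element is
# a (revision, text) pair, and the generator yields plan tuples such as
# ('unchanged', line), ('new-a', line) or ('killed-b', line).  With
# hypothetical inputs::
#
#     annotated_a = [(b'rev1', b'common\n'), (b'rev2', b'only in a\n')]
#     annotated_b = [(b'rev1', b'common\n')]
#     list(_plan_annotate_merge(annotated_a, annotated_b,
#                               ancestors_a={b'rev1'}, ancestors_b={b'rev1'}))
#     # -> [('unchanged', b'common\n'), ('new-a', b'only in a\n')]
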
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            a file_id.
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = record.get_bytes_as('lines')
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)

    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i + n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique.

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right

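    # Illustrative sketch (not part of the original module): matching blocks
    # are (i, j, n) triples from the sequence matcher.  For two three-line
    # texts whose only match is the first line, get_matching_blocks() returns
    # roughly [(0, 0, 1), (3, 3, 0)], and _unique_lines() would then report
    # lines 1 and 2 of each side as unique: ([1, 2], [1, 2]).
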
    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j + n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n

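# Illustrative sketch (not part of the original module): where the two plans
# agree on a 'killed-b' or 'new-b' entry, that entry is attributed to the old
# 'b' text and is neutralised::
#
#     old = [('unchanged', b'x\n'), ('killed-b', b'y\n'), ('new-b', b'z\n')]
#     new = [('unchanged', b'x\n'), ('killed-b', b'y\n'), ('new-b', b'z\n')]
#     list(_PlanMergeBase._subtract_plans(old, new))
#     # -> [('unchanged', b'x\n'), ('unchanged', b'y\n')]
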
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = _mod_graph.Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # per-file graphs
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            trace.mutter('found dominating revision for %s\n%s > %s', self.vf,
                         self._head_key[-1], other)
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == {_mod_revision.NULL_REVISION}:
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                trace.mutter('More than 2 LCAs, falling back to all nodes for:'
                             ' %s, %s\n=> %s',
                             self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == _mod_revision.NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if _mod_revision.NULL_REVISION in parent_map:
                parent_map.pop(_mod_revision.NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.items():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)

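    # Illustrative sketch (not part of the original module): pruning tail 'C'
    # from a toy graph also removes 'B', whose only parent was 'C', while 'A'
    # survives because it still has parent 'D'::
    #
    #     parent_map = {'A': ['B', 'D'], 'B': ['C'], 'C': [], 'D': []}
    #     child_map = {'A': [], 'B': ['A'], 'C': ['B'], 'D': ['A']}
    #     _PlanMerge._prune_tails(parent_map, child_map, ['C'])
    #     # parent_map is now {'A': ['D'], 'D': []}
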
    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from .bzr import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
            # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None:  # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)

class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:

    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == _mod_revision.NULL_REVISION:
                self.lcas.add(None)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed