# Copyright (C) 2005, 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
branch as _mod_branch,
20
conflicts as _mod_conflicts,
30
revision as _mod_revision,
39
from bzrlib.symbol_versioning import (
43
# TODO: Report back as changes are merged in
46
def transform_tree(from_tree, to_tree, interesting_ids=None):
47
from_tree.lock_tree_write()
49
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
50
interesting_ids=interesting_ids, this_tree=from_tree)
55
class MergeHooks(hooks.Hooks):
58
hooks.Hooks.__init__(self)
59
self.create_hook(hooks.HookPoint('merge_file_content',
60
"Called when file content needs to be merged (including when one "
61
"side has deleted the file and the other has changed it)."
62
"merge_file_content is called with a "
63
"bzrlib.merge.MergeHookParams. The function should return a tuple "
64
"of (status, lines), where status is one of 'not_applicable', "
65
"'success', 'conflicted', or 'delete'. If status is success or "
66
"conflicted, then lines should be an iterable of the new lines "
71
class MergeHookParams(object):
72
"""Object holding parameters passed to merge_file_content hooks.
74
There are 3 fields hooks can access:
76
:ivar merger: the Merger object
77
:ivar file_id: the file ID of the file being merged
78
:ivar trans_id: the transform ID for the merge of this file
79
:ivar this_kind: kind of file_id in 'this' tree
80
:ivar other_kind: kind of file_id in 'other' tree
81
:ivar winner: one of 'this', 'other', 'conflict'
84
def __init__(self, merger, file_id, trans_id, this_kind, other_kind,
87
self.file_id = file_id
88
self.trans_id = trans_id
89
self.this_kind = this_kind
90
self.other_kind = other_kind
93
def is_file_merge(self):
94
"""True if this_kind and other_kind are both 'file'."""
95
return self.this_kind == 'file' and self.other_kind == 'file'
97
@decorators.cachedproperty
99
"""The lines of the 'base' version of the file."""
100
return self.merger.get_lines(self.merger.base_tree, self.file_id)
102
@decorators.cachedproperty
103
def this_lines(self):
104
"""The lines of the 'this' version of the file."""
105
return self.merger.get_lines(self.merger.this_tree, self.file_id)
107
@decorators.cachedproperty
108
def other_lines(self):
109
"""The lines of the 'other' version of the file."""
110
return self.merger.get_lines(self.merger.other_tree, self.file_id)
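# Illustrative sketch (not part of bzrlib): a minimal 'merge_file_content'
# hook written against the contract documented in MergeHooks above. It relies
# only on the MergeHookParams attributes shown in this excerpt and defers to
# later hooks whenever it cannot help.
def _example_merge_file_content_hook(params):
    if not params.is_file_merge():
        # Only handle plain file/file merges here.
        return 'not_applicable', None
    if params.this_lines == params.other_lines:
        # Both sides made the same change, so accept it as a clean merge.
        return 'success', params.this_lines
    # Let the next registered hook (or the default text merge) decide.
    return 'not_applicable', None
# Registration would typically go through something like
#   Merger.hooks.install_named_hook('merge_file_content',
#       _example_merge_file_content_hook, 'example hook')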
113
class Merger(object):
117
def __init__(self, this_branch, other_tree=None, base_tree=None,
118
this_tree=None, pb=None, change_reporter=None,
119
recurse='down', revision_graph=None):
120
object.__init__(self)
121
self.this_branch = this_branch
122
self.this_basis = _mod_revision.ensure_null(
123
this_branch.last_revision())
124
self.this_rev_id = None
125
self.this_tree = this_tree
126
self.this_revision_tree = None
127
self.this_basis_tree = None
128
self.other_tree = other_tree
129
self.other_branch = None
130
self.base_tree = base_tree
131
self.ignore_zero = False
132
self.backup_files = False
133
self.interesting_ids = None
134
self.interesting_files = None
135
self.show_base = False
136
self.reprocess = False
138
pb = progress.DummyProgress()
141
self.recurse = recurse
142
self.change_reporter = change_reporter
143
self._cached_trees = {}
144
self._revision_graph = revision_graph
145
self._base_is_ancestor = None
146
self._base_is_other_ancestor = None
147
self._is_criss_cross = None
148
self._lca_trees = None
150
def cache_trees_with_revision_ids(self, trees):
151
"""Cache any tree in trees if it has a revision_id."""
152
for maybe_tree in trees:
153
if maybe_tree is None:
156
rev_id = maybe_tree.get_revision_id()
157
except AttributeError:
159
self._cached_trees[rev_id] = maybe_tree
162
def revision_graph(self):
163
if self._revision_graph is None:
164
self._revision_graph = self.this_branch.repository.get_graph()
165
return self._revision_graph
167
def _set_base_is_ancestor(self, value):
168
self._base_is_ancestor = value
170
def _get_base_is_ancestor(self):
171
if self._base_is_ancestor is None:
172
self._base_is_ancestor = self.revision_graph.is_ancestor(
173
self.base_rev_id, self.this_basis)
174
return self._base_is_ancestor
176
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
178
def _set_base_is_other_ancestor(self, value):
179
self._base_is_other_ancestor = value
181
def _get_base_is_other_ancestor(self):
182
if self._base_is_other_ancestor is None:
183
if self.other_basis is None:
185
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
186
self.base_rev_id, self.other_basis)
187
return self._base_is_other_ancestor
189
base_is_other_ancestor = property(_get_base_is_other_ancestor,
190
_set_base_is_other_ancestor)
193
def from_uncommitted(tree, other_tree, pb=None, base_tree=None):
194
"""Return a Merger for uncommitted changes in other_tree.
196
:param tree: The tree to merge into
197
:param other_tree: The tree to get uncommitted changes from
198
:param pb: A progress indicator
199
:param base_tree: The basis to use for the merge. If unspecified,
200
other_tree.basis_tree() will be used.
202
if base_tree is None:
203
base_tree = other_tree.basis_tree()
204
merger = Merger(tree.branch, other_tree, base_tree, tree, pb)
205
merger.base_rev_id = merger.base_tree.get_revision_id()
206
merger.other_rev_id = None
207
merger.other_basis = merger.base_rev_id
211
def from_mergeable(klass, tree, mergeable, pb):
212
"""Return a Merger for a bundle or merge directive.
214
:param tree: The tree to merge changes into
215
:param mergeable: A merge directive or bundle
216
:param pb: A progress indicator
218
mergeable.install_revisions(tree.branch.repository)
219
base_revision_id, other_revision_id, verified =\
220
mergeable.get_merge_request(tree.branch.repository)
221
revision_graph = tree.branch.repository.get_graph()
222
if base_revision_id is not None:
223
if (base_revision_id != _mod_revision.NULL_REVISION and
224
revision_graph.is_ancestor(
225
base_revision_id, tree.branch.last_revision())):
226
base_revision_id = None
228
trace.warning('Performing cherrypick')
229
merger = klass.from_revision_ids(pb, tree, other_revision_id,
230
base_revision_id, revision_graph=
232
return merger, verified
235
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
236
base_branch=None, revision_graph=None,
238
"""Return a Merger for revision-ids.
240
:param pb: A progress indicator
241
:param tree: The tree to merge changes into
242
:param other: The revision-id to use as OTHER
243
:param base: The revision-id to use as BASE. If not specified, will
245
:param other_branch: A branch containing the other revision-id. If
246
not supplied, tree.branch is used.
247
:param base_branch: A branch containing the base revision-id. If
248
not supplied, other_branch or tree.branch will be used.
249
:param revision_graph: If you have a revision_graph precomputed, pass
250
it in, otherwise it will be created for you.
251
:param tree_branch: The branch associated with tree. If not supplied,
252
tree.branch will be used.
254
if tree_branch is None:
255
tree_branch = tree.branch
256
merger = Merger(tree_branch, this_tree=tree, pb=pb,
257
revision_graph=revision_graph)
258
if other_branch is None:
259
other_branch = tree.branch
260
merger.set_other_revision(other, other_branch)
264
if base_branch is None:
265
base_branch = other_branch
266
merger.set_base_revision(base, base_branch)
269
def revision_tree(self, revision_id, branch=None):
270
if revision_id not in self._cached_trees:
272
branch = self.this_branch
274
tree = self.this_tree.revision_tree(revision_id)
275
except errors.NoSuchRevisionInTree:
276
tree = branch.repository.revision_tree(revision_id)
277
self._cached_trees[revision_id] = tree
278
return self._cached_trees[revision_id]
280
def _get_tree(self, treespec, possible_transports=None):
281
from bzrlib import workingtree
282
location, revno = treespec
284
tree = workingtree.WorkingTree.open_containing(location)[0]
285
return tree.branch, tree
286
branch = _mod_branch.Branch.open_containing(
287
location, possible_transports)[0]
289
revision_id = branch.last_revision()
291
revision_id = branch.get_rev_id(revno)
292
revision_id = _mod_revision.ensure_null(revision_id)
293
return branch, self.revision_tree(revision_id, branch)
295
@deprecated_method(deprecated_in((2, 1, 0)))
296
def ensure_revision_trees(self):
297
if self.this_revision_tree is None:
298
self.this_basis_tree = self.revision_tree(self.this_basis)
299
if self.this_basis == self.this_rev_id:
300
self.this_revision_tree = self.this_basis_tree
302
if self.other_rev_id is None:
303
other_basis_tree = self.revision_tree(self.other_basis)
304
if other_basis_tree.has_changes(self.other_tree):
305
raise errors.WorkingTreeNotRevision(self.this_tree)
306
other_rev_id = self.other_basis
307
self.other_tree = other_basis_tree
309
@deprecated_method(deprecated_in((2, 1, 0)))
310
def file_revisions(self, file_id):
311
self.ensure_revision_trees()
312
def get_id(tree, file_id):
313
revision_id = tree.inventory[file_id].revision
315
if self.this_rev_id is None:
316
if self.this_basis_tree.get_file_sha1(file_id) != \
317
self.this_tree.get_file_sha1(file_id):
318
raise errors.WorkingTreeNotRevision(self.this_tree)
320
trees = (self.this_basis_tree, self.other_tree)
321
return [get_id(tree, file_id) for tree in trees]
323
@deprecated_method(deprecated_in((2, 1, 0)))
324
def check_basis(self, check_clean, require_commits=True):
325
if self.this_basis is None and require_commits is True:
326
raise errors.BzrCommandError(
327
"This branch has no commits."
328
" (perhaps you would prefer 'bzr pull')")
331
if self.this_basis != self.this_rev_id:
332
raise errors.UncommittedChanges(self.this_tree)
334
@deprecated_method(deprecated_in((2, 1, 0)))
335
def compare_basis(self):
337
basis_tree = self.revision_tree(self.this_tree.last_revision())
338
except errors.NoSuchRevision:
339
basis_tree = self.this_tree.basis_tree()
340
if not self.this_tree.has_changes(basis_tree):
341
self.this_rev_id = self.this_basis
343
def set_interesting_files(self, file_list):
344
self.interesting_files = file_list
346
def set_pending(self):
347
if (not self.base_is_ancestor or not self.base_is_other_ancestor
348
or self.other_rev_id is None):
352
def _add_parent(self):
353
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
354
new_parent_trees = []
355
for revision_id in new_parents:
357
tree = self.revision_tree(revision_id)
358
except errors.NoSuchRevision:
362
new_parent_trees.append((revision_id, tree))
364
self.this_tree.set_parent_trees(new_parent_trees,
365
allow_leftmost_as_ghost=True)
367
for _revision_id, tree in new_parent_trees:
371
def set_other(self, other_revision, possible_transports=None):
372
"""Set the revision and tree to merge from.
374
This sets the other_tree, other_rev_id, other_basis attributes.
376
:param other_revision: The [path, revision] list to merge from.
378
self.other_branch, self.other_tree = self._get_tree(other_revision,
380
if other_revision[1] == -1:
381
self.other_rev_id = _mod_revision.ensure_null(
382
self.other_branch.last_revision())
383
if _mod_revision.is_null(self.other_rev_id):
384
raise errors.NoCommits(self.other_branch)
385
self.other_basis = self.other_rev_id
386
elif other_revision[1] is not None:
387
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
388
self.other_basis = self.other_rev_id
390
self.other_rev_id = None
391
self.other_basis = self.other_branch.last_revision()
392
if self.other_basis is None:
393
raise errors.NoCommits(self.other_branch)
394
if self.other_rev_id is not None:
395
self._cached_trees[self.other_rev_id] = self.other_tree
396
self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)
398
def set_other_revision(self, revision_id, other_branch):
399
"""Set 'other' based on a branch and revision id
401
:param revision_id: The revision to use for a tree
402
:param other_branch: The branch containing this tree
404
self.other_rev_id = revision_id
405
self.other_branch = other_branch
406
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
407
self.other_tree = self.revision_tree(revision_id)
408
self.other_basis = revision_id
410
def set_base_revision(self, revision_id, branch):
411
"""Set 'base' based on a branch and revision id
413
:param revision_id: The revision to use for a tree
414
:param branch: The branch containing this tree
416
self.base_rev_id = revision_id
417
self.base_branch = branch
418
self._maybe_fetch(branch, self.this_branch, revision_id)
419
self.base_tree = self.revision_tree(revision_id)
421
def _maybe_fetch(self, source, target, revision_id):
422
if not source.repository.has_same_location(target.repository):
423
target.fetch(source, revision_id)
426
revisions = [_mod_revision.ensure_null(self.this_basis),
427
_mod_revision.ensure_null(self.other_basis)]
428
if _mod_revision.NULL_REVISION in revisions:
429
self.base_rev_id = _mod_revision.NULL_REVISION
430
self.base_tree = self.revision_tree(self.base_rev_id)
431
self._is_criss_cross = False
433
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
434
self._is_criss_cross = False
436
self.base_rev_id = _mod_revision.NULL_REVISION
438
self.base_rev_id = list(lcas)[0]
439
else: # len(lcas) > 1
441
# find_unique_lca can only handle 2 nodes, so we have to
442
# start back at the beginning. It is a shame to traverse
443
# the graph again, but better than re-implementing
445
self.base_rev_id = self.revision_graph.find_unique_lca(
446
revisions[0], revisions[1])
448
self.base_rev_id = self.revision_graph.find_unique_lca(
450
self._is_criss_cross = True
451
if self.base_rev_id == _mod_revision.NULL_REVISION:
452
raise errors.UnrelatedBranches()
453
if self._is_criss_cross:
454
trace.warning('Warning: criss-cross merge encountered. See bzr'
455
' help criss-cross.')
456
trace.mutter('Criss-cross lcas: %r' % lcas)
457
interesting_revision_ids = [self.base_rev_id]
458
interesting_revision_ids.extend(lcas)
459
interesting_trees = dict((t.get_revision_id(), t)
460
for t in self.this_branch.repository.revision_trees(
461
interesting_revision_ids))
462
self._cached_trees.update(interesting_trees)
463
self.base_tree = interesting_trees.pop(self.base_rev_id)
464
sorted_lca_keys = self.revision_graph.find_merge_order(
466
self._lca_trees = [interesting_trees[key]
467
for key in sorted_lca_keys]
469
self.base_tree = self.revision_tree(self.base_rev_id)
470
self.base_is_ancestor = True
471
self.base_is_other_ancestor = True
472
trace.mutter('Base revid: %r' % self.base_rev_id)
474
def set_base(self, base_revision):
475
"""Set the base revision to use for the merge.
477
:param base_revision: A 2-list containing a path and revision number.
479
trace.mutter("doing merge() with no base_revision specified")
480
if base_revision == [None, None]:
483
base_branch, self.base_tree = self._get_tree(base_revision)
484
if base_revision[1] == -1:
485
self.base_rev_id = base_branch.last_revision()
486
elif base_revision[1] is None:
487
self.base_rev_id = _mod_revision.NULL_REVISION
489
self.base_rev_id = _mod_revision.ensure_null(
490
base_branch.get_rev_id(base_revision[1]))
491
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
493
def make_merger(self):
494
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
495
'other_tree': self.other_tree,
496
'interesting_ids': self.interesting_ids,
497
'interesting_files': self.interesting_files,
498
'pp': self.pp, 'this_branch': self.this_branch,
500
if self.merge_type.requires_base:
501
kwargs['base_tree'] = self.base_tree
502
if self.merge_type.supports_reprocess:
503
kwargs['reprocess'] = self.reprocess
505
raise errors.BzrError(
506
"Conflict reduction is not supported for merge"
507
" type %s." % self.merge_type)
508
if self.merge_type.supports_show_base:
509
kwargs['show_base'] = self.show_base
511
raise errors.BzrError("Showing base is not supported for this"
512
" merge type. %s" % self.merge_type)
513
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
514
and not self.base_is_other_ancestor):
515
raise errors.CannotReverseCherrypick()
516
if self.merge_type.supports_cherrypick:
517
kwargs['cherrypick'] = (not self.base_is_ancestor or
518
not self.base_is_other_ancestor)
519
if self._is_criss_cross and getattr(self.merge_type,
520
'supports_lca_trees', False):
521
kwargs['lca_trees'] = self._lca_trees
522
return self.merge_type(pb=self._pb,
523
change_reporter=self.change_reporter,
526
def _do_merge_to(self, merge):
527
if self.other_branch is not None:
528
self.other_branch.update_references(self.this_branch)
530
if self.recurse == 'down':
531
for relpath, file_id in self.this_tree.iter_references():
532
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
533
other_revision = self.other_tree.get_reference_revision(
535
if other_revision == sub_tree.last_revision():
537
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
538
sub_merge.merge_type = self.merge_type
539
other_branch = self.other_branch.reference_parent(file_id, relpath)
540
sub_merge.set_other_revision(other_revision, other_branch)
541
base_revision = self.base_tree.get_reference_revision(file_id)
542
sub_merge.base_tree = \
543
sub_tree.branch.repository.revision_tree(base_revision)
544
sub_merge.base_rev_id = base_revision
548
self.this_tree.lock_tree_write()
550
if self.base_tree is not None:
551
self.base_tree.lock_read()
553
if self.other_tree is not None:
554
self.other_tree.lock_read()
556
merge = self.make_merger()
557
self._do_merge_to(merge)
559
if self.other_tree is not None:
560
self.other_tree.unlock()
562
if self.base_tree is not None:
563
self.base_tree.unlock()
565
self.this_tree.unlock()
566
if len(merge.cooked_conflicts) == 0:
567
if not self.ignore_zero and not trace.is_quiet():
568
trace.note("All changes applied successfully.")
570
trace.note("%d conflicts encountered."
571
% len(merge.cooked_conflicts))
573
return len(merge.cooked_conflicts)
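# Illustrative sketch (not part of bzrlib): a typical way to drive the Merger
# API above, assuming 'wt' is a WorkingTree and 'other_branch' is a Branch
# whose repository contains the revision id 'other_rev'.
#
#   merger = Merger.from_revision_ids(None, wt, other_rev,
#                                     other_branch=other_branch)
#   merger.merge_type = Merge3Merger
#   conflict_count = merger.do_merge()
#
# do_merge() returns the number of cooked conflicts, so zero means the merge
# applied cleanly.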
576
class _InventoryNoneEntry(object):
577
"""This represents an inventory entry which *isn't there*.
579
It simplifies the merging logic if we always have an InventoryEntry, even
580
if it isn't actually present.
587
symlink_target = None
590
_none_entry = _InventoryNoneEntry()
593
class Merge3Merger(object):
594
"""Three-way merger that uses the merge3 text merger"""
596
supports_reprocess = True
597
supports_show_base = True
598
history_based = False
599
supports_cherrypick = True
600
supports_reverse_cherrypick = True
601
winner_idx = {"this": 2, "other": 1, "conflict": 1}
602
supports_lca_trees = True
604
def __init__(self, working_tree, this_tree, base_tree, other_tree,
605
interesting_ids=None, reprocess=False, show_base=False,
606
pb=progress.DummyProgress(), pp=None, change_reporter=None,
607
interesting_files=None, do_merge=True,
608
cherrypick=False, lca_trees=None, this_branch=None):
609
"""Initialize the merger object and perform the merge.
611
:param working_tree: The working tree to apply the merge to
612
:param this_tree: The local tree in the merge operation
613
:param base_tree: The common tree in the merge operation
614
:param other_tree: The other tree to merge changes from
615
:param this_branch: The branch associated with this_tree
616
:param interesting_ids: The file_ids of files that should
617
participate in the merge. May not be combined with
619
:param reprocess: If True, perform conflict-reduction processing.
620
:param show_base: If True, show the base revision in text conflicts.
621
(incompatible with reprocess)
622
:param pb: A Progress bar
623
:param pp: A ProgressPhase object
624
:param change_reporter: An object that should report changes made
625
:param interesting_files: The tree-relative paths of files that should
626
participate in the merge. If these paths refer to directories,
627
the contents of those directories will also be included. May not
628
be combined with interesting_ids. If neither interesting_files nor
629
interesting_ids is specified, all files may participate in the
631
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
632
if the ancestry was found to include a criss-cross merge.
633
Otherwise should be None.
635
object.__init__(self)
636
if interesting_files is not None and interesting_ids is not None:
638
'specify either interesting_ids or interesting_files')
639
self.interesting_ids = interesting_ids
640
self.interesting_files = interesting_files
641
self.this_tree = working_tree
642
self.base_tree = base_tree
643
self.other_tree = other_tree
644
self.this_branch = this_branch
645
self._raw_conflicts = []
646
self.cooked_conflicts = []
647
self.reprocess = reprocess
648
self.show_base = show_base
649
self._lca_trees = lca_trees
650
# Uncommenting this will change the default algorithm to always use
651
# _entries_lca. This can be useful for running the test suite and
652
# making sure we haven't missed any corner cases.
653
# if lca_trees is None:
654
# self._lca_trees = [self.base_tree]
657
self.change_reporter = change_reporter
658
self.cherrypick = cherrypick
660
self.pp = progress.ProgressPhase("Merge phase", 3, self.pb)
665
self.this_tree.lock_tree_write()
666
self.base_tree.lock_read()
667
self.other_tree.lock_read()
669
self.tt = transform.TreeTransform(self.this_tree, self.pb)
672
self._compute_transform()
674
results = self.tt.apply(no_conflicts=True)
675
self.write_modified(results)
677
self.this_tree.add_conflicts(self.cooked_conflicts)
678
except errors.UnsupportedOperation:
683
self.other_tree.unlock()
684
self.base_tree.unlock()
685
self.this_tree.unlock()
688
def make_preview_transform(self):
689
self.base_tree.lock_read()
690
self.other_tree.lock_read()
691
self.tt = transform.TransformPreview(self.this_tree)
694
self._compute_transform()
697
self.other_tree.unlock()
698
self.base_tree.unlock()
702
def _compute_transform(self):
703
if self._lca_trees is None:
704
entries = self._entries3()
705
resolver = self._three_way
707
entries = self._entries_lca()
708
resolver = self._lca_multi_way
709
child_pb = ui.ui_factory.nested_progress_bar()
711
for num, (file_id, changed, parents3, names3,
712
executable3) in enumerate(entries):
713
child_pb.update('Preparing file merge', num, len(entries))
714
self._merge_names(file_id, parents3, names3, resolver=resolver)
716
file_status = self.merge_contents(file_id)
718
file_status = 'unmodified'
719
self._merge_executable(file_id,
720
executable3, file_status, resolver=resolver)
725
child_pb = ui.ui_factory.nested_progress_bar()
727
fs_conflicts = transform.resolve_conflicts(self.tt, child_pb,
728
lambda t, c: transform.conflict_pass(t, c, self.other_tree))
731
if self.change_reporter is not None:
732
from bzrlib import delta
733
delta.report_changes(
734
self.tt.iter_changes(), self.change_reporter)
735
self.cook_conflicts(fs_conflicts)
736
for conflict in self.cooked_conflicts:
737
trace.warning(conflict)
740
"""Gather data about files modified between three trees.
742
Return a list of tuples of file_id, changed, parents3, names3,
743
executable3. changed is a boolean indicating whether the file contents
744
or kind were changed. parents3 is a tuple of parent ids for base,
745
other and this. names3 is a tuple of names for base, other and this.
746
executable3 is a tuple of execute-bit values for base, other and this.
749
iterator = self.other_tree.iter_changes(self.base_tree,
750
include_unchanged=True, specific_files=self.interesting_files,
751
extra_trees=[self.this_tree])
752
this_entries = dict((e.file_id, e) for p, e in
753
self.this_tree.iter_entries_by_dir(
754
self.interesting_ids))
755
for (file_id, paths, changed, versioned, parents, names, kind,
756
executable) in iterator:
757
if (self.interesting_ids is not None and
758
file_id not in self.interesting_ids):
760
entry = this_entries.get(file_id)
761
if entry is not None:
762
this_name = entry.name
763
this_parent = entry.parent_id
764
this_executable = entry.executable
768
this_executable = None
769
parents3 = parents + (this_parent,)
770
names3 = names + (this_name,)
771
executable3 = executable + (this_executable,)
772
result.append((file_id, changed, parents3, names3, executable3))
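# Sketch (comments only, not part of bzrlib): a typical _entries3 row for a
# file that OTHER renamed and edited while THIS left it alone might look like
#   ('file-id-1', True,
#    ('dir-id', 'dir-id', 'dir-id'),        # parents3: base, other, this
#    ('old.txt', 'new.txt', 'old.txt'),     # names3:   base, other, this
#    (False, False, False))                 # executable3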
775
def _entries_lca(self):
776
"""Gather data about files modified between multiple trees.
778
This compares OTHER versus all LCA trees, and for interesting entries,
779
it then compares with THIS and BASE.
781
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
782
:return: [(file_id, changed, parents, names, executable)]
783
file_id Simple file_id of the entry
784
changed Boolean, True if the kind or contents changed
786
parents ((base, [parent_id, in, lcas]), parent_id_other,
788
names ((base, [name, in, lcas]), name_in_other, name_in_this)
789
executable ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
791
if self.interesting_files is not None:
792
lookup_trees = [self.this_tree, self.base_tree]
793
lookup_trees.extend(self._lca_trees)
794
# I think we should include the lca trees as well
795
interesting_ids = self.other_tree.paths2ids(self.interesting_files,
798
interesting_ids = self.interesting_ids
800
walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)
802
base_inventory = self.base_tree.inventory
803
this_inventory = self.this_tree.inventory
804
for path, file_id, other_ie, lca_values in walker.iter_all():
805
# Is this modified at all from any of the other trees?
807
other_ie = _none_entry
808
if interesting_ids is not None and file_id not in interesting_ids:
811
# If other_revision is found in any of the lcas, that means this
812
# node is uninteresting. This is because when merging, if there are
813
# multiple heads(), we have to create a new node. So if we didn't,
814
# we know that the ancestry is linear, and that OTHER did not
816
# See doc/developers/lca_merge_resolution.txt for details
817
other_revision = other_ie.revision
818
if other_revision is not None:
819
# We can't use this shortcut when other_revision is None,
820
# because it may be None because things are WorkingTrees, and
821
# not because it is *actually* None.
822
is_unmodified = False
823
for lca_path, ie in lca_values:
824
if ie is not None and ie.revision == other_revision:
831
for lca_path, lca_ie in lca_values:
833
lca_entries.append(_none_entry)
835
lca_entries.append(lca_ie)
837
if file_id in base_inventory:
838
base_ie = base_inventory[file_id]
840
base_ie = _none_entry
842
if file_id in this_inventory:
843
this_ie = this_inventory[file_id]
845
this_ie = _none_entry
851
for lca_ie in lca_entries:
852
lca_kinds.append(lca_ie.kind)
853
lca_parent_ids.append(lca_ie.parent_id)
854
lca_names.append(lca_ie.name)
855
lca_executable.append(lca_ie.executable)
857
kind_winner = self._lca_multi_way(
858
(base_ie.kind, lca_kinds),
859
other_ie.kind, this_ie.kind)
860
parent_id_winner = self._lca_multi_way(
861
(base_ie.parent_id, lca_parent_ids),
862
other_ie.parent_id, this_ie.parent_id)
863
name_winner = self._lca_multi_way(
864
(base_ie.name, lca_names),
865
other_ie.name, this_ie.name)
867
content_changed = True
868
if kind_winner == 'this':
869
# No kind change in OTHER, see if there are *any* changes
870
if other_ie.kind == 'directory':
871
if parent_id_winner == 'this' and name_winner == 'this':
872
# No change for this directory in OTHER, skip
874
content_changed = False
875
elif other_ie.kind is None or other_ie.kind == 'file':
876
def get_sha1(ie, tree):
877
if ie.kind != 'file':
879
return tree.get_file_sha1(file_id)
880
base_sha1 = get_sha1(base_ie, self.base_tree)
881
lca_sha1s = [get_sha1(ie, tree) for ie, tree
882
in zip(lca_entries, self._lca_trees)]
883
this_sha1 = get_sha1(this_ie, self.this_tree)
884
other_sha1 = get_sha1(other_ie, self.other_tree)
885
sha1_winner = self._lca_multi_way(
886
(base_sha1, lca_sha1s), other_sha1, this_sha1,
887
allow_overriding_lca=False)
888
exec_winner = self._lca_multi_way(
889
(base_ie.executable, lca_executable),
890
other_ie.executable, this_ie.executable)
891
if (parent_id_winner == 'this' and name_winner == 'this'
892
and sha1_winner == 'this' and exec_winner == 'this'):
893
# No kind, parent, name, exec, or content change for
894
# OTHER, so this node is not considered interesting
896
if sha1_winner == 'this':
897
content_changed = False
898
elif other_ie.kind == 'symlink':
899
def get_target(ie, tree):
900
if ie.kind != 'symlink':
902
return tree.get_symlink_target(file_id)
903
base_target = get_target(base_ie, self.base_tree)
904
lca_targets = [get_target(ie, tree) for ie, tree
905
in zip(lca_entries, self._lca_trees)]
906
this_target = get_target(this_ie, self.this_tree)
907
other_target = get_target(other_ie, self.other_tree)
908
target_winner = self._lca_multi_way(
909
(base_target, lca_targets),
910
other_target, this_target)
911
if (parent_id_winner == 'this' and name_winner == 'this'
912
and target_winner == 'this'):
913
# No kind, parent, name, or symlink target change
916
if target_winner == 'this':
917
content_changed = False
918
elif other_ie.kind == 'tree-reference':
919
# The 'changed' information seems to be handled at a higher
920
# level. At least, _entries3 returns False for content
921
# changed, even when at a new revision_id.
922
content_changed = False
923
if (parent_id_winner == 'this' and name_winner == 'this'):
924
# Nothing interesting
927
raise AssertionError('unhandled kind: %s' % other_ie.kind)
928
# XXX: We need to handle kind == 'symlink'
930
# If we have gotten this far, that means something has changed
931
result.append((file_id, content_changed,
932
((base_ie.parent_id, lca_parent_ids),
933
other_ie.parent_id, this_ie.parent_id),
934
((base_ie.name, lca_names),
935
other_ie.name, this_ie.name),
936
((base_ie.executable, lca_executable),
937
other_ie.executable, this_ie.executable)
944
self.tt.final_kind(self.tt.root)
945
except errors.NoSuchFile:
946
self.tt.cancel_deletion(self.tt.root)
947
if self.tt.final_file_id(self.tt.root) is None:
948
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
950
other_root_file_id = self.other_tree.get_root_id()
951
if other_root_file_id is None:
953
other_root = self.tt.trans_id_file_id(other_root_file_id)
954
if other_root == self.tt.root:
957
self.tt.final_kind(other_root)
958
except errors.NoSuchFile:
960
if self.this_tree.has_id(self.other_tree.inventory.root.file_id):
961
# the other tree's root is a non-root in the current tree
963
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
964
self.tt.cancel_creation(other_root)
965
self.tt.cancel_versioning(other_root)
967
def reparent_children(self, ie, target):
968
for thing, child in ie.children.iteritems():
969
trans_id = self.tt.trans_id_file_id(child.file_id)
970
self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
972
def write_modified(self, results):
974
for path in results.modified_paths:
975
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
978
hash = self.this_tree.get_file_sha1(file_id)
981
modified_hashes[file_id] = hash
982
self.this_tree.set_merge_modified(modified_hashes)
985
def parent(entry, file_id):
986
"""Determine the parent for a file_id (used as a key method)"""
989
return entry.parent_id
992
def name(entry, file_id):
993
"""Determine the name for a file_id (used as a key method)"""
999
def contents_sha1(tree, file_id):
1000
"""Determine the sha1 of the file contents (used as a key method)."""
1001
if file_id not in tree:
1003
return tree.get_file_sha1(file_id)
1006
def executable(tree, file_id):
1007
"""Determine the executability of a file-id (used as a key method)."""
1008
if not tree.has_id(file_id):
1010
if tree.kind(file_id) != "file":
1012
return tree.is_executable(file_id)
1015
def kind(tree, file_id):
1016
"""Determine the kind of a file-id (used as a key method)."""
1017
if not tree.has_id(file_id):
1019
return tree.kind(file_id)
1022
def _three_way(base, other, this):
1023
#if base == other, either they all agree, or only THIS has changed.
1026
elif this not in (base, other):
1028
# "Ambiguous clean merge" -- both sides have made the same change.
1031
# this == base: only other has changed.
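# Summary sketch (comments only) of the _three_way decision above:
#   base == other              -> 'this'     (at most THIS changed)
#   this not in (base, other)  -> 'conflict' (both sides changed differently)
#   this == other              -> 'this'     (both sides made the same change)
#   otherwise, this == base    -> 'other'    (only OTHER changed)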
1036
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
1037
"""Consider LCAs when determining whether a change has occurred.
1039
If the LCAs are all identical, this is the same as a _three_way comparison.
1041
:param bases: value in (BASE, [LCAS])
1042
:param other: value in OTHER
1043
:param this: value in THIS
1044
:param allow_overriding_lca: If there is more than one unique lca
1045
value, allow OTHER to override THIS if it has a new value, and
1046
THIS only has an lca value, or vice versa. This is appropriate for
1047
truly scalar values, not as much for non-scalars.
1048
:return: 'this', 'other', or 'conflict' depending on whether an entry
1051
# See doc/developers/lca_tree_merging.txt for details about this
1054
# Either Ambiguously clean, or nothing was actually changed. We
1057
base_val, lca_vals = bases
1058
# Remove 'base_val' from the lca_vals, because it is not interesting
1059
filtered_lca_vals = [lca_val for lca_val in lca_vals
1060
if lca_val != base_val]
1061
if len(filtered_lca_vals) == 0:
1062
return Merge3Merger._three_way(base_val, other, this)
1064
unique_lca_vals = set(filtered_lca_vals)
1065
if len(unique_lca_vals) == 1:
1066
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
1068
if allow_overriding_lca:
1069
if other in unique_lca_vals:
1070
if this in unique_lca_vals:
1071
# Each side picked a different lca, conflict
1074
# This has a value which supersedes both lca values, and
1075
# other only has an lca value
1077
elif this in unique_lca_vals:
1078
# OTHER has a value which supersedes both lca values, and this
1079
# only has an lca value
1082
# At this point, the lcas disagree, and the tips disagree
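# Illustrative sketch (comments only, not part of bzrlib): resolving the
# executable bit with _lca_multi_way.  Given
#   bases = (False, [True, True])   # the BASE value and the two LCA values
#   other = True, this = False
# both LCAs already carry OTHER's value, so the call reduces to
# _three_way(True, True, False) and 'this' wins.  If the LCAs disagreed and
# both tips introduced values seen in neither LCA, the result would be
# 'conflict'.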
1086
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
1087
"""Do a three-way test on a scalar.
1088
Return "this", "other" or "conflict", depending whether a value wins.
1090
key_base = key(base_tree, file_id)
1091
key_other = key(other_tree, file_id)
1092
#if base == other, either they all agree, or only THIS has changed.
1093
if key_base == key_other:
1095
key_this = key(this_tree, file_id)
1096
# "Ambiguous clean merge"
1097
if key_this == key_other:
1099
elif key_this == key_base:
1104
def merge_names(self, file_id):
1105
def get_entry(tree):
1106
if tree.has_id(file_id):
1107
return tree.inventory[file_id]
1110
this_entry = get_entry(self.this_tree)
1111
other_entry = get_entry(self.other_tree)
1112
base_entry = get_entry(self.base_tree)
1113
entries = (base_entry, other_entry, this_entry)
1116
for entry in entries:
1119
parents.append(None)
1121
names.append(entry.name)
1122
parents.append(entry.parent_id)
1123
return self._merge_names(file_id, parents, names,
1124
resolver=self._three_way)
1126
def _merge_names(self, file_id, parents, names, resolver):
1127
"""Perform a merge on file_id names and parents"""
1128
base_name, other_name, this_name = names
1129
base_parent, other_parent, this_parent = parents
1131
name_winner = resolver(*names)
1133
parent_id_winner = resolver(*parents)
1134
if this_name is None:
1135
if name_winner == "this":
1136
name_winner = "other"
1137
if parent_id_winner == "this":
1138
parent_id_winner = "other"
1139
if name_winner == "this" and parent_id_winner == "this":
1141
if name_winner == "conflict":
1142
trans_id = self.tt.trans_id_file_id(file_id)
1143
self._raw_conflicts.append(('name conflict', trans_id,
1144
this_name, other_name))
1145
if parent_id_winner == "conflict":
1146
trans_id = self.tt.trans_id_file_id(file_id)
1147
self._raw_conflicts.append(('parent conflict', trans_id,
1148
this_parent, other_parent))
1149
if other_name is None:
1150
# it doesn't matter whether the result was 'other' or
1151
# 'conflict'-- if there's no 'other', we leave it alone.
1153
# if we get here, name_winner and parent_winner are set to safe values.
1154
trans_id = self.tt.trans_id_file_id(file_id)
1155
parent_id = parents[self.winner_idx[parent_id_winner]]
1156
if parent_id is not None:
1157
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1158
self.tt.adjust_path(names[self.winner_idx[name_winner]],
1159
parent_trans_id, trans_id)
1161
def merge_contents(self, file_id):
1162
"""Performs a merge on file_id contents."""
1163
def contents_pair(tree):
1164
if file_id not in tree:
1166
kind = tree.kind(file_id)
1168
contents = tree.get_file_sha1(file_id)
1169
elif kind == "symlink":
1170
contents = tree.get_symlink_target(file_id)
1173
return kind, contents
1175
# See SPOT run. run, SPOT, run.
1176
# So we're not QUITE repeating ourselves; we do tricky things with
1178
base_pair = contents_pair(self.base_tree)
1179
other_pair = contents_pair(self.other_tree)
1181
this_pair = contents_pair(self.this_tree)
1182
lca_pairs = [contents_pair(tree) for tree in self._lca_trees]
1183
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1184
this_pair, allow_overriding_lca=False)
1186
if base_pair == other_pair:
1189
# We delayed evaluating this_pair as long as we can to avoid
1190
# unnecessary sha1 calculation
1191
this_pair = contents_pair(self.this_tree)
1192
winner = self._three_way(base_pair, other_pair, this_pair)
1193
if winner == 'this':
1194
# No interesting changes introduced by OTHER
1196
# We have a hypothetical conflict, but if we have files, then we
1197
# can try to merge the content
1198
trans_id = self.tt.trans_id_file_id(file_id)
1199
params = MergeHookParams(self, file_id, trans_id, this_pair[0],
1200
other_pair[0], winner)
1201
hooks = Merger.hooks['merge_file_content']
1202
hooks = list(hooks) + [self.default_text_merge]
1203
hook_status = 'not_applicable'
1205
hook_status, lines = hook(params)
1206
if hook_status != 'not_applicable':
1207
# Don't try any more hooks, this one applies.
1210
if hook_status == 'not_applicable':
1211
# This is a contents conflict, because none of the available
1212
# functions could merge it.
1214
name = self.tt.final_name(trans_id)
1215
parent_id = self.tt.final_parent(trans_id)
1216
if self.this_tree.has_id(file_id):
1217
self.tt.unversion_file(trans_id)
1218
file_group = self._dump_conflicts(name, parent_id, file_id,
1220
self._raw_conflicts.append(('contents conflict', file_group))
1221
elif hook_status == 'success':
1222
self.tt.create_file(lines, trans_id)
1223
elif hook_status == 'conflicted':
1224
# XXX: perhaps the hook should be able to provide
1225
# the BASE/THIS/OTHER files?
1226
self.tt.create_file(lines, trans_id)
1227
self._raw_conflicts.append(('text conflict', trans_id))
1228
name = self.tt.final_name(trans_id)
1229
parent_id = self.tt.final_parent(trans_id)
1230
self._dump_conflicts(name, parent_id, file_id)
1231
elif hook_status == 'delete':
1232
self.tt.unversion_file(trans_id)
1234
elif hook_status == 'done':
1235
# The hook function did whatever it needs to do directly, no
1236
# further action needed here.
1239
raise AssertionError('unknown hook_status: %r' % (hook_status,))
1240
if not self.this_tree.has_id(file_id) and result == "modified":
1241
self.tt.version_file(file_id, trans_id)
1243
self.tt.tree_kind(trans_id)
1244
self.tt.delete_contents(trans_id)
1245
except errors.NoSuchFile:
1249
def _default_other_winner_merge(self, merge_hook_params):
1250
"""Replace this contents with other."""
1251
file_id = merge_hook_params.file_id
1252
trans_id = merge_hook_params.trans_id
1253
file_in_this = self.this_tree.has_id(file_id)
1254
if self.other_tree.has_id(file_id):
1255
# OTHER changed the file
1257
if wt.supports_content_filtering():
1258
# We get the path from the working tree if it exists.
1259
# That fails though when OTHER is adding a file, so
1260
# we fall back to the other tree to find the path if
1261
# it doesn't exist locally.
1263
filter_tree_path = wt.id2path(file_id)
1264
except errors.NoSuchId:
1265
filter_tree_path = self.other_tree.id2path(file_id)
1267
# Skip the id2path lookup for older formats
1268
filter_tree_path = None
1269
transform.create_from_tree(self.tt, trans_id,
1270
self.other_tree, file_id,
1271
filter_tree_path=filter_tree_path)
1274
# OTHER deleted the file
1275
return 'delete', None
1277
raise AssertionError(
1278
'winner is OTHER, but file_id %r not in THIS or OTHER tree'
1281
def default_text_merge(self, merge_hook_params):
1282
if merge_hook_params.winner == 'other':
1283
# OTHER is a straight winner, so replace this contents with other
1284
return self._default_other_winner_merge(merge_hook_params)
1285
elif merge_hook_params.is_file_merge():
1286
# THIS and OTHER are both files, so text merge. Either
1287
# BASE is a file, or both converted to files, so at least we
1288
# have agreement that output should be a file.
1290
self.text_merge(merge_hook_params.file_id,
1291
merge_hook_params.trans_id)
1292
except errors.BinaryFile:
1293
return 'not_applicable', None
1296
return 'not_applicable', None
1298
def get_lines(self, tree, file_id):
1299
"""Return the lines in a file, or an empty list."""
1300
if tree.has_id(file_id):
1301
return tree.get_file(file_id).readlines()
1305
def text_merge(self, file_id, trans_id):
1306
"""Perform a three-way text merge on a file_id"""
1307
# it's possible that we got here with base as a different type.
1308
# if so, we just want two-way text conflicts.
1309
if self.base_tree.has_id(file_id) and \
1310
self.base_tree.kind(file_id) == "file":
1311
base_lines = self.get_lines(self.base_tree, file_id)
1314
other_lines = self.get_lines(self.other_tree, file_id)
1315
this_lines = self.get_lines(self.this_tree, file_id)
1316
m3 = merge3.Merge3(base_lines, this_lines, other_lines,
1317
is_cherrypick=self.cherrypick)
1318
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
1319
if self.show_base is True:
1320
base_marker = '|' * 7
1324
def iter_merge3(retval):
1325
retval["text_conflicts"] = False
1326
for line in m3.merge_lines(name_a = "TREE",
1327
name_b = "MERGE-SOURCE",
1328
name_base = "BASE-REVISION",
1329
start_marker=start_marker,
1330
base_marker=base_marker,
1331
reprocess=self.reprocess):
1332
if line.startswith(start_marker):
1333
retval["text_conflicts"] = True
1334
yield line.replace(start_marker, '<' * 7)
1338
merge3_iterator = iter_merge3(retval)
1339
self.tt.create_file(merge3_iterator, trans_id)
1340
if retval["text_conflicts"] is True:
1341
self._raw_conflicts.append(('text conflict', trans_id))
1342
name = self.tt.final_name(trans_id)
1343
parent_id = self.tt.final_parent(trans_id)
1344
file_group = self._dump_conflicts(name, parent_id, file_id,
1345
this_lines, base_lines,
1347
file_group.append(trans_id)
1349
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
1350
base_lines=None, other_lines=None, set_version=False,
1352
"""Emit conflict files.
1353
If this_lines, base_lines, or other_lines are omitted, they will be
1354
determined automatically. If set_version is true, the .OTHER, .THIS
1355
or .BASE (in that order) will be created as versioned files.
1357
data = [('OTHER', self.other_tree, other_lines),
1358
('THIS', self.this_tree, this_lines)]
1360
data.append(('BASE', self.base_tree, base_lines))
1362
# We need to use the actual path in the working tree of the file here,
1363
# ignoring the conflict suffixes
1365
if wt.supports_content_filtering():
1367
filter_tree_path = wt.id2path(file_id)
1368
except errors.NoSuchId:
1369
# file has been deleted
1370
filter_tree_path = None
1372
# Skip the id2path lookup for older formats
1373
filter_tree_path = None
1377
for suffix, tree, lines in data:
1378
if tree.has_id(file_id):
1379
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1380
suffix, lines, filter_tree_path)
1381
file_group.append(trans_id)
1382
if set_version and not versioned:
1383
self.tt.version_file(file_id, trans_id)
1387
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1388
lines=None, filter_tree_path=None):
1389
"""Emit a single conflict file."""
1390
name = name + '.' + suffix
1391
trans_id = self.tt.create_path(name, parent_id)
1392
transform.create_from_tree(self.tt, trans_id, tree, file_id, lines,
1396
def merge_executable(self, file_id, file_status):
1397
"""Perform a merge on the execute bit."""
1398
executable = [self.executable(t, file_id) for t in (self.base_tree,
1399
self.other_tree, self.this_tree)]
1400
self._merge_executable(file_id, executable, file_status,
1401
resolver=self._three_way)
1403
def _merge_executable(self, file_id, executable, file_status,
1405
"""Perform a merge on the execute bit."""
1406
base_executable, other_executable, this_executable = executable
1407
if file_status == "deleted":
1409
winner = resolver(*executable)
1410
if winner == "conflict":
1411
# There must be a None in here, if we have a conflict, but we
1412
# need executability since file status was not deleted.
1413
if self.executable(self.other_tree, file_id) is None:
1417
if winner == 'this' and file_status != "modified":
1419
trans_id = self.tt.trans_id_file_id(file_id)
1421
if self.tt.final_kind(trans_id) != "file":
1423
except errors.NoSuchFile:
1425
if winner == "this":
1426
executability = this_executable
1428
if self.other_tree.has_id(file_id):
1429
executability = other_executable
1430
elif self.this_tree.has_id(file_id):
1431
executability = this_executable
1432
elif self.base_tree_has_id(file_id):
1433
executability = base_executable
1434
if executability is not None:
1435
trans_id = self.tt.trans_id_file_id(file_id)
1436
self.tt.set_executability(executability, trans_id)
1438
def cook_conflicts(self, fs_conflicts):
1439
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1441
self.cooked_conflicts.extend(transform.cook_conflicts(
1442
fs_conflicts, self.tt))
1443
fp = transform.FinalPaths(self.tt)
1444
for conflict in self._raw_conflicts:
1445
conflict_type = conflict[0]
1446
if conflict_type in ('name conflict', 'parent conflict'):
1447
trans_id = conflict[1]
1448
conflict_args = conflict[2:]
1449
if trans_id not in name_conflicts:
1450
name_conflicts[trans_id] = {}
1451
transform.unique_add(name_conflicts[trans_id], conflict_type,
1453
if conflict_type == 'contents conflict':
1454
for trans_id in conflict[1]:
1455
file_id = self.tt.final_file_id(trans_id)
1456
if file_id is not None:
1458
path = fp.get_path(trans_id)
1459
for suffix in ('.BASE', '.THIS', '.OTHER'):
1460
if path.endswith(suffix):
1461
path = path[:-len(suffix)]
1463
c = _mod_conflicts.Conflict.factory(conflict_type,
1464
path=path, file_id=file_id)
1465
self.cooked_conflicts.append(c)
1466
if conflict_type == 'text conflict':
1467
trans_id = conflict[1]
1468
path = fp.get_path(trans_id)
1469
file_id = self.tt.final_file_id(trans_id)
1470
c = _mod_conflicts.Conflict.factory(conflict_type,
1471
path=path, file_id=file_id)
1472
self.cooked_conflicts.append(c)
1474
for trans_id, conflicts in name_conflicts.iteritems():
1476
this_parent, other_parent = conflicts['parent conflict']
1477
if this_parent == other_parent:
1478
raise AssertionError()
1480
this_parent = other_parent = \
1481
self.tt.final_file_id(self.tt.final_parent(trans_id))
1483
this_name, other_name = conflicts['name conflict']
1484
if this_name == other_name:
1485
raise AssertionError()
1487
this_name = other_name = self.tt.final_name(trans_id)
1488
other_path = fp.get_path(trans_id)
1489
if this_parent is not None and this_name is not None:
1490
this_parent_path = \
1491
fp.get_path(self.tt.trans_id_file_id(this_parent))
1492
this_path = osutils.pathjoin(this_parent_path, this_name)
1494
this_path = "<deleted>"
1495
file_id = self.tt.final_file_id(trans_id)
1496
c = _mod_conflicts.Conflict.factory('path conflict', path=this_path,
1497
conflict_path=other_path,
1499
self.cooked_conflicts.append(c)
1500
self.cooked_conflicts.sort(key=_mod_conflicts.Conflict.sort_key)
1503
class WeaveMerger(Merge3Merger):
1504
"""Three-way tree merger, text weave merger."""
1505
supports_reprocess = True
1506
supports_show_base = False
1507
supports_reverse_cherrypick = False
1508
history_based = True
1510
def _generate_merge_plan(self, file_id, base):
1511
return self.this_tree.plan_file_merge(file_id, self.other_tree,
1514
def _merged_lines(self, file_id):
1515
"""Generate the merged lines.
1516
There is no distinction between lines that are meant to contain <<<<<<<
1520
base = self.base_tree
1523
plan = self._generate_merge_plan(file_id, base)
1524
if 'merge' in debug.debug_flags:
1526
trans_id = self.tt.trans_id_file_id(file_id)
1527
name = self.tt.final_name(trans_id) + '.plan'
1528
contents = ('%11s|%s' % l for l in plan)
1529
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1530
textmerge = versionedfile.PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1531
'>>>>>>> MERGE-SOURCE\n')
1532
lines, conflicts = textmerge.merge_lines(self.reprocess)
1534
base_lines = textmerge.base_from_plan()
1537
return lines, base_lines
1539
def text_merge(self, file_id, trans_id):
1540
"""Perform a (weave) text merge for a given file and file-id.
1541
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1542
and a conflict will be noted.
1544
lines, base_lines = self._merged_lines(file_id)
1546
# Note we're checking whether the OUTPUT is binary in this case,
1547
# because we don't want to get into weave merge guts.
1548
textfile.check_text_lines(lines)
1549
self.tt.create_file(lines, trans_id)
1550
if base_lines is not None:
1552
self._raw_conflicts.append(('text conflict', trans_id))
1553
name = self.tt.final_name(trans_id)
1554
parent_id = self.tt.final_parent(trans_id)
1555
file_group = self._dump_conflicts(name, parent_id, file_id,
1557
base_lines=base_lines)
1558
file_group.append(trans_id)
1561
class LCAMerger(WeaveMerger):
1563
def _generate_merge_plan(self, file_id, base):
1564
return self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
1567
class Diff3Merger(Merge3Merger):
1568
"""Three-way merger using external diff3 for text merging"""
1570
def dump_file(self, temp_dir, name, tree, file_id):
1571
out_path = osutils.pathjoin(temp_dir, name)
1572
out_file = open(out_path, "wb")
1574
in_file = tree.get_file(file_id)
1575
for line in in_file:
1576
out_file.write(line)
1581
def text_merge(self, file_id, trans_id):
1582
"""Perform a diff3 merge using a specified file-id and trans-id.
1583
If conflicts are encountered, .BASE, .THIS. and .OTHER conflict files
1584
will be dumped, and a conflict will be noted.
1587
temp_dir = osutils.mkdtemp(prefix="bzr-")
1589
new_file = osutils.pathjoin(temp_dir, "new")
1590
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
1591
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
1592
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
1593
status = bzrlib.patch.diff3(new_file, this, base, other)
1594
if status not in (0, 1):
1595
raise errors.BzrError("Unhandled diff3 exit code")
1596
f = open(new_file, 'rb')
1598
self.tt.create_file(f, trans_id)
1602
name = self.tt.final_name(trans_id)
1603
parent_id = self.tt.final_parent(trans_id)
1604
self._dump_conflicts(name, parent_id, file_id)
1605
self._raw_conflicts.append(('text conflict', trans_id))
1607
osutils.rmtree(temp_dir)
1610
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
1612
merge_type=Merge3Merger,
1613
interesting_ids=None,
1617
interesting_files=None,
1619
pb=progress.DummyProgress(),
1620
change_reporter=None):
1621
"""Primary interface for merging.
1623
typical use is probably
1624
'merge_inner(branch, branch.get_revision_tree(other_revision),
1625
branch.get_revision_tree(base_revision))'
1627
if this_tree is None:
1628
raise errors.BzrError("bzrlib.merge.merge_inner requires a this_tree "
1629
"parameter as of bzrlib version 0.8.")
1630
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1631
pb=pb, change_reporter=change_reporter)
1632
merger.backup_files = backup_files
1633
merger.merge_type = merge_type
1634
merger.interesting_ids = interesting_ids
1635
merger.ignore_zero = ignore_zero
1636
if interesting_files:
1638
raise ValueError('Only supply interesting_ids'
1639
' or interesting_files')
1640
merger.interesting_files = interesting_files
1641
merger.show_base = show_base
1642
merger.reprocess = reprocess
1643
merger.other_rev_id = other_rev_id
1644
merger.other_basis = other_rev_id
1645
get_revision_id = getattr(base_tree, 'get_revision_id', None)
1646
if get_revision_id is None:
1647
get_revision_id = base_tree.last_revision
1648
merger.cache_trees_with_revision_ids([other_tree, base_tree, this_tree])
1649
merger.set_base_revision(get_revision_id(), this_branch)
1650
return merger.do_merge()
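# Illustrative sketch (not part of bzrlib): typical merge_inner usage, assuming
# 'wt' is a WorkingTree whose branch repository contains the revision ids
# 'other_rev' and 'base_rev'.
#
#   repo = wt.branch.repository
#   conflict_count = merge_inner(wt.branch,
#                                repo.revision_tree(other_rev),
#                                repo.revision_tree(base_rev),
#                                this_tree=wt)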
1652
def get_merge_type_registry():
1653
"""Merge type registry is in bzrlib.option to avoid circular imports.
1655
This method provides a sanctioned way to retrieve it.
1657
from bzrlib import option
1658
return option._merge_type_registry
1661
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1662
def status_a(revision, text):
1663
if revision in ancestors_b:
1664
return 'killed-b', text
1666
return 'new-a', text
1668
def status_b(revision, text):
1669
if revision in ancestors_a:
1670
return 'killed-a', text
1672
return 'new-b', text
1674
plain_a = [t for (a, t) in annotated_a]
1675
plain_b = [t for (a, t) in annotated_b]
1676
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1677
blocks = matcher.get_matching_blocks()
1680
for ai, bi, l in blocks:
1681
# process all mismatched sections
1682
# (last mismatched section is handled because blocks always
1683
# includes a 0-length last block)
1684
for revision, text in annotated_a[a_cur:ai]:
1685
yield status_a(revision, text)
1686
for revision, text in annotated_b[b_cur:bi]:
1687
yield status_b(revision, text)
1688
# and now the matched section
1691
for text_a in plain_a[ai:a_cur]:
1692
yield "unchanged", text_a
1695
class _PlanMergeBase(object):
1697
def __init__(self, a_rev, b_rev, vf, key_prefix):
1700
:param a_rev: Revision-id of one revision to merge
1701
:param b_rev: Revision-id of the other revision to merge
1702
:param vf: A VersionedFiles containing both revisions
1703
:param key_prefix: A prefix for accessing keys in vf, typically
1709
self._last_lines = None
1710
self._last_lines_revision_id = None
1711
self._cached_matching_blocks = {}
1712
self._key_prefix = key_prefix
1713
self._precache_tip_lines()
1715
def _precache_tip_lines(self):
1716
lines = self.get_lines([self.a_rev, self.b_rev])
1717
self.lines_a = lines[self.a_rev]
1718
self.lines_b = lines[self.b_rev]
1720
def get_lines(self, revisions):
1721
"""Get lines for revisions from the backing VersionedFiles.
1723
:raises RevisionNotPresent: on absent texts.
1725
keys = [(self._key_prefix + (rev,)) for rev in revisions]
1727
for record in self.vf.get_record_stream(keys, 'unordered', True):
1728
if record.storage_kind == 'absent':
1729
raise errors.RevisionNotPresent(record.key, self.vf)
1730
result[record.key[-1]] = osutils.chunks_to_lines(
1731
record.get_bytes_as('chunked'))
1734
def plan_merge(self):
1735
"""Generate a 'plan' for merging the two revisions.
1737
This involves comparing their texts and determining the cause of
1738
differences. If text A has a line and text B does not, then either the
1739
line was added to text A, or it was deleted from B. Once the causes
1740
are combined, they are written out in the format described in
1741
VersionedFile.plan_merge
1743
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
1744
unique_a, unique_b = self._unique_lines(blocks)
1745
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
1746
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
1747
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
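# Sketch (comments only): the plan yielded above is a sequence of
# (state, line) pairs, where state is one of 'unchanged', 'new-a', 'new-b',
# 'killed-a', 'killed-b', 'conflicted-a' or 'conflicted-b', exactly the states
# emitted by _iter_plan below.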
1749

    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n
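
    # The resulting plan is a sequence of (state, line) pairs.  Lines inside
    # matching blocks come out as 'unchanged'; a line unique to one side is
    # 'new-a'/'new-b' if that side introduced it, 'killed-b'/'killed-a' if
    # the other side deleted it, and 'conflicted-a'/'conflicted-b' when the
    # same unique line is classified as both new and killed (the two sides
    # resolved an earlier conflict differently).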

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()
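
    # get_matching_blocks() returns (i, j, n) triples meaning
    # left_lines[i:i+n] == right_lines[j:j+n], terminated by a zero-length
    # block (len(left_lines), len(right_lines), 0).  The single-entry cache
    # of the most recent right-hand lines saves a refetch when that revision
    # becomes the left side of the next comparison.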

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            lists of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
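
    # Example: matching_blocks == [(0, 0, 2), (3, 4, 1), (5, 6, 0)] gives
    # unique_left == [2, 4] and unique_right == [2, 3, 5]: everything that
    # falls between the matched regions on each side.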

    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
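
    # Example: with old_plan == [('killed-b', 'x\n'), ('new-b', 'y\n')] and
    # new_plan == [('killed-b', 'x\n'), ('new-b', 'y\n'), ('new-b', 'z\n')],
    # the matched 'killed-b' line becomes ('unchanged', 'x\n'), the matched
    # 'new-b' line is dropped, and the unmatched ('new-b', 'z\n') is emitted
    # verbatim.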


class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = _mod_graph.Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # simplicity.
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            trace.mutter('found dominating revision for %s\n%s > %s', self.vf,
                         self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([_mod_revision.NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                trace.mutter('More than 2 LCAs, falling back to all nodes for:'
                             ' %s, %s\n=> %s',
                             self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == _mod_revision.NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map
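
    # The returned parent_map maps key tuples to ordered tuples of parent
    # keys: each tip maps to its LCAs (ordered by find_merge_order), and the
    # walk recurses until a single (or empty) LCA remains, or falls back to
    # _find_unique_parents when more than two LCAs turn up at once.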

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if _mod_revision.NULL_REVISION in parent_map:
                parent_map.pop(_mod_revision.NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
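
    # Example: parent_map == {('A',): [('B',)], ('B',): [('C',)]}, where
    # ('C',) is not itself a key, gives filtered_parent_map ==
    # {('A',): [('B',)], ('B',): []}, child_map ==
    # {('A',): [], ('B',): [('A',)]} and tails == [('B',)].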

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
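
    # Pruning cascades: removing a tail may leave one of its children with no
    # remaining parents, in which case that child is queued for removal too.
    # E.g. with parent_map == {'C': ['B'], 'B': ['A'], 'A': []} and
    # child_map == {'A': ['B'], 'B': ['C'], 'C': []}, pruning tail 'A'
    # removes 'B' and then 'C' as well.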

    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision_ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
        # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)


class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:

    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == _mod_revision.NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks
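
    # The matching blocks between each tip and each LCA are computed once
    # here and stored under (tip_revision, lca) keys, so _determine_status
    # can reuse them via _get_matching_blocks without refetching any text.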

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all lcas.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed