1
# Copyright (C) 2005, 2006, 2008 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19
from itertools import chain
30
revision as _mod_revision,
33
from bzrlib.branch import Branch
34
from bzrlib.conflicts import ConflictList, Conflict
35
from bzrlib.errors import (BzrCommandError,
45
WorkingTreeNotRevision,
48
from bzrlib.graph import Graph
49
from bzrlib.merge3 import Merge3
50
from bzrlib.osutils import rename, pathjoin
51
from progress import DummyProgress, ProgressPhase
52
from bzrlib.revision import (NULL_REVISION, ensure_null)
53
from bzrlib.textfile import check_text_lines
54
from bzrlib.trace import mutter, warning, note, is_quiet
55
from bzrlib.transform import (TransformPreview, TreeTransform,
56
resolve_conflicts, cook_conflicts,
57
conflict_pass, FinalPaths, create_by_entry,
58
unique_add, ROOT_PARENT)
59
from bzrlib.versionedfile import PlanWeaveMerge
62
# TODO: Report back as changes are merged in
65
def transform_tree(from_tree, to_tree, interesting_ids=None):
    """Make from_tree's content match to_tree's via an in-place merge.

    :param from_tree: The working tree to modify (also used as THIS).
    :param to_tree: The tree whose content should be mirrored (used as OTHER).
    :param interesting_ids: If supplied, restrict the operation to these
        file-ids only.
    """
    # Merging with BASE == THIS applies exactly the changes needed to turn
    # from_tree's content into to_tree's content.
    merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
                interesting_ids=interesting_ids, this_tree=from_tree)
71
def __init__(self, this_branch, other_tree=None, base_tree=None,
72
this_tree=None, pb=DummyProgress(), change_reporter=None,
73
recurse='down', revision_graph=None):
75
self.this_branch = this_branch
76
self.this_basis = _mod_revision.ensure_null(
77
this_branch.last_revision())
78
self.this_rev_id = None
79
self.this_tree = this_tree
80
self.this_revision_tree = None
81
self.this_basis_tree = None
82
self.other_tree = other_tree
83
self.other_branch = None
84
self.base_tree = base_tree
85
self.ignore_zero = False
86
self.backup_files = False
87
self.interesting_ids = None
88
self.interesting_files = None
89
self.show_base = False
90
self.reprocess = False
93
self.recurse = recurse
94
self.change_reporter = change_reporter
95
self._cached_trees = {}
96
self._revision_graph = revision_graph
97
self._base_is_ancestor = None
98
self._base_is_other_ancestor = None
101
def revision_graph(self):
    """Return the revision graph for this_branch's repository.

    The graph is created lazily on first access and cached on the
    instance afterwards.
    """
    # NOTE(review): accessed elsewhere as ``self.revision_graph`` without a
    # call, so this is presumably wrapped by @property in the full file
    # (the decorator line is not visible here) -- confirm.
    if self._revision_graph is None:
        self._revision_graph = self.this_branch.repository.get_graph()
    return self._revision_graph
106
def _set_base_is_ancestor(self, value):
107
self._base_is_ancestor = value
109
def _get_base_is_ancestor(self):
110
if self._base_is_ancestor is None:
111
self._base_is_ancestor = self.revision_graph.is_ancestor(
112
self.base_rev_id, self.this_basis)
113
return self._base_is_ancestor
115
# True if BASE is an ancestor of THIS; computed lazily, may be pre-set.
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
117
def _set_base_is_other_ancestor(self, value):
118
self._base_is_other_ancestor = value
120
def _get_base_is_other_ancestor(self):
121
if self._base_is_other_ancestor is None:
122
if self.other_basis is None:
124
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
125
self.base_rev_id, self.other_basis)
126
return self._base_is_other_ancestor
128
# True if BASE is an ancestor of OTHER; computed lazily, may be pre-set.
base_is_other_ancestor = property(_get_base_is_other_ancestor,
                                  _set_base_is_other_ancestor)
132
def from_uncommitted(tree, other_tree, pb):
133
"""Return a Merger for uncommitted changes in other_tree.
135
:param tree: The tree to merge into
136
:param other_tree: The tree to get uncommitted changes from
137
:param pb: A progress indicator
139
merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
141
merger.base_rev_id = merger.base_tree.get_revision_id()
142
merger.other_rev_id = None
143
merger.other_basis = merger.base_rev_id
147
def from_mergeable(klass, tree, mergeable, pb):
148
"""Return a Merger for a bundle or merge directive.
150
:param tree: The tree to merge changes into
151
:param mergeable: A merge directive or bundle
152
:param pb: A progress indicator
154
mergeable.install_revisions(tree.branch.repository)
155
base_revision_id, other_revision_id, verified =\
156
mergeable.get_merge_request(tree.branch.repository)
157
revision_graph = tree.branch.repository.get_graph()
158
if base_revision_id is not None:
159
if (base_revision_id != _mod_revision.NULL_REVISION and
160
revision_graph.is_ancestor(
161
base_revision_id, tree.branch.last_revision())):
162
base_revision_id = None
164
warning('Performing cherrypick')
165
merger = klass.from_revision_ids(pb, tree, other_revision_id,
166
base_revision_id, revision_graph=
168
return merger, verified
171
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
172
base_branch=None, revision_graph=None,
174
"""Return a Merger for revision-ids.
176
:param tree: The tree to merge changes into
177
:param other: The revision-id to use as OTHER
178
:param base: The revision-id to use as BASE. If not specified, will
180
:param other_branch: A branch containing the other revision-id. If
181
not supplied, tree.branch is used.
182
:param base_branch: A branch containing the base revision-id. If
183
not supplied, other_branch or tree.branch will be used.
184
:param revision_graph: If you have a revision_graph precomputed, pass
185
it in, otherwise it will be created for you.
186
:param pb: A progress indicator
188
if tree_branch is None:
189
tree_branch = tree.branch
190
merger = Merger(tree_branch, this_tree=tree, pb=pb,
191
revision_graph=revision_graph)
192
if other_branch is None:
193
other_branch = tree.branch
194
merger.set_other_revision(other, other_branch)
198
if base_branch is None:
199
base_branch = other_branch
200
merger.set_base_revision(base, base_branch)
203
def revision_tree(self, revision_id, branch=None):
204
if revision_id not in self._cached_trees:
206
branch = self.this_branch
208
tree = self.this_tree.revision_tree(revision_id)
209
except errors.NoSuchRevisionInTree:
210
tree = branch.repository.revision_tree(revision_id)
211
self._cached_trees[revision_id] = tree
212
return self._cached_trees[revision_id]
214
def _get_tree(self, treespec, possible_transports=None):
215
from bzrlib import workingtree
216
location, revno = treespec
218
tree = workingtree.WorkingTree.open_containing(location)[0]
219
return tree.branch, tree
220
branch = Branch.open_containing(location, possible_transports)[0]
222
revision_id = branch.last_revision()
224
revision_id = branch.get_rev_id(revno)
225
revision_id = ensure_null(revision_id)
226
return branch, self.revision_tree(revision_id, branch)
228
def ensure_revision_trees(self):
    """Ensure this_basis_tree / other_tree are backed by recorded revisions.

    :raises WorkingTreeNotRevision: if other_tree has changes relative to
        its basis, since it then cannot be represented by any revision.
    """
    if self.this_revision_tree is None:
        self.this_basis_tree = self.revision_tree(self.this_basis)
        if self.this_basis == self.this_rev_id:
            self.this_revision_tree = self.this_basis_tree

    if self.other_rev_id is None:
        other_basis_tree = self.revision_tree(self.other_basis)
        changes = other_basis_tree.changes_from(self.other_tree)
        if changes.has_changed():
            raise WorkingTreeNotRevision(self.this_tree)
        # BUG FIX: the original assigned to a local ``other_rev_id`` -- a
        # dead store.  The ``if self.other_rev_id is None`` guard shows the
        # intent was to record the resolved id on the instance.
        self.other_rev_id = self.other_basis
        self.other_tree = other_basis_tree
242
def file_revisions(self, file_id):
243
self.ensure_revision_trees()
244
def get_id(tree, file_id):
245
revision_id = tree.inventory[file_id].revision
247
if self.this_rev_id is None:
248
if self.this_basis_tree.get_file_sha1(file_id) != \
249
self.this_tree.get_file_sha1(file_id):
250
raise WorkingTreeNotRevision(self.this_tree)
252
trees = (self.this_basis_tree, self.other_tree)
253
return [get_id(tree, file_id) for tree in trees]
255
def check_basis(self, check_clean, require_commits=True):
256
if self.this_basis is None and require_commits is True:
257
raise BzrCommandError("This branch has no commits."
258
" (perhaps you would prefer 'bzr pull')")
261
if self.this_basis != self.this_rev_id:
262
raise errors.UncommittedChanges(self.this_tree)
264
def compare_basis(self):
266
basis_tree = self.revision_tree(self.this_tree.last_revision())
267
except errors.NoSuchRevision:
268
basis_tree = self.this_tree.basis_tree()
269
changes = self.this_tree.changes_from(basis_tree)
270
if not changes.has_changed():
271
self.this_rev_id = self.this_basis
273
def set_interesting_files(self, file_list):
    """Restrict the merge to file_list (tree-relative paths); None means all."""
    self.interesting_files = file_list
276
def set_pending(self):
277
if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
281
def _add_parent(self):
282
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
283
new_parent_trees = []
284
for revision_id in new_parents:
286
tree = self.revision_tree(revision_id)
287
except errors.NoSuchRevision:
291
new_parent_trees.append((revision_id, tree))
293
self.this_tree.set_parent_trees(new_parent_trees,
294
allow_leftmost_as_ghost=True)
296
for _revision_id, tree in new_parent_trees:
300
def set_other(self, other_revision, possible_transports=None):
301
"""Set the revision and tree to merge from.
303
This sets the other_tree, other_rev_id, other_basis attributes.
305
:param other_revision: The [path, revision] list to merge from.
307
self.other_branch, self.other_tree = self._get_tree(other_revision,
309
if other_revision[1] == -1:
310
self.other_rev_id = _mod_revision.ensure_null(
311
self.other_branch.last_revision())
312
if _mod_revision.is_null(self.other_rev_id):
313
raise NoCommits(self.other_branch)
314
self.other_basis = self.other_rev_id
315
elif other_revision[1] is not None:
316
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
317
self.other_basis = self.other_rev_id
319
self.other_rev_id = None
320
self.other_basis = self.other_branch.last_revision()
321
if self.other_basis is None:
322
raise NoCommits(self.other_branch)
323
if self.other_rev_id is not None:
324
self._cached_trees[self.other_rev_id] = self.other_tree
325
self._maybe_fetch(self.other_branch,self.this_branch, self.other_basis)
327
def set_other_revision(self, revision_id, other_branch):
    """Set 'other' based on a branch and revision id

    :param revision_id: The revision to use for a tree
    :param other_branch: The branch containing this tree
    """
    self.other_rev_id = revision_id
    self.other_branch = other_branch
    # Make sure the revision is present locally before building its tree.
    self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
    self.other_tree = self.revision_tree(revision_id)
    self.other_basis = revision_id
339
def set_base_revision(self, revision_id, branch):
    """Set 'base' based on a branch and revision id

    :param revision_id: The revision to use for a tree
    :param branch: The branch containing this tree
    """
    self.base_rev_id = revision_id
    self.base_branch = branch
    # Make sure the revision is present locally before building its tree.
    self._maybe_fetch(branch, self.this_branch, revision_id)
    self.base_tree = self.revision_tree(revision_id)
350
def _maybe_fetch(self, source, target, revision_id):
351
if not source.repository.has_same_location(target.repository):
352
target.fetch(source, revision_id)
355
revisions = [ensure_null(self.this_basis),
356
ensure_null(self.other_basis)]
357
if NULL_REVISION in revisions:
358
self.base_rev_id = NULL_REVISION
360
self.base_rev_id, steps = self.revision_graph.find_unique_lca(
361
revisions[0], revisions[1], count_steps=True)
362
if self.base_rev_id == NULL_REVISION:
363
raise UnrelatedBranches()
365
warning('Warning: criss-cross merge encountered. See bzr'
366
' help criss-cross.')
367
self.base_tree = self.revision_tree(self.base_rev_id)
368
self.base_is_ancestor = True
369
self.base_is_other_ancestor = True
371
def set_base(self, base_revision):
372
"""Set the base revision to use for the merge.
374
:param base_revision: A 2-list containing a path and revision number.
376
mutter("doing merge() with no base_revision specified")
377
if base_revision == [None, None]:
380
base_branch, self.base_tree = self._get_tree(base_revision)
381
if base_revision[1] == -1:
382
self.base_rev_id = base_branch.last_revision()
383
elif base_revision[1] is None:
384
self.base_rev_id = _mod_revision.NULL_REVISION
386
self.base_rev_id = _mod_revision.ensure_null(
387
base_branch.get_rev_id(base_revision[1]))
388
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
390
def make_merger(self):
391
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
392
'other_tree': self.other_tree,
393
'interesting_ids': self.interesting_ids,
394
'interesting_files': self.interesting_files,
397
if self.merge_type.requires_base:
398
kwargs['base_tree'] = self.base_tree
399
if self.merge_type.supports_reprocess:
400
kwargs['reprocess'] = self.reprocess
402
raise BzrError("Conflict reduction is not supported for merge"
403
" type %s." % self.merge_type)
404
if self.merge_type.supports_show_base:
405
kwargs['show_base'] = self.show_base
407
raise BzrError("Showing base is not supported for this"
408
" merge type. %s" % self.merge_type)
409
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
410
and not self.base_is_other_ancestor):
411
raise errors.CannotReverseCherrypick()
412
if self.merge_type.supports_cherrypick:
413
kwargs['cherrypick'] = (not self.base_is_ancestor or
414
not self.base_is_other_ancestor)
415
return self.merge_type(pb=self._pb,
416
change_reporter=self.change_reporter,
419
def _do_merge_to(self, merge):
421
if self.recurse == 'down':
422
for relpath, file_id in self.this_tree.iter_references():
423
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
424
other_revision = self.other_tree.get_reference_revision(
426
if other_revision == sub_tree.last_revision():
428
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
429
sub_merge.merge_type = self.merge_type
430
other_branch = self.other_branch.reference_parent(file_id, relpath)
431
sub_merge.set_other_revision(other_revision, other_branch)
432
base_revision = self.base_tree.get_reference_revision(file_id)
433
sub_merge.base_tree = \
434
sub_tree.branch.repository.revision_tree(base_revision)
435
sub_merge.base_rev_id = base_revision
439
self.this_tree.lock_tree_write()
441
if self.base_tree is not None:
442
self.base_tree.lock_read()
444
if self.other_tree is not None:
445
self.other_tree.lock_read()
447
merge = self.make_merger()
448
self._do_merge_to(merge)
450
if self.other_tree is not None:
451
self.other_tree.unlock()
453
if self.base_tree is not None:
454
self.base_tree.unlock()
456
self.this_tree.unlock()
457
if len(merge.cooked_conflicts) == 0:
458
if not self.ignore_zero and not is_quiet():
459
note("All changes applied successfully.")
461
note("%d conflicts encountered." % len(merge.cooked_conflicts))
463
return len(merge.cooked_conflicts)
466
class Merge3Merger(object):
467
"""Three-way merger that uses the merge3 text merger"""
469
supports_reprocess = True
470
supports_show_base = True
471
history_based = False
472
supports_cherrypick = True
473
supports_reverse_cherrypick = True
474
winner_idx = {"this": 2, "other": 1, "conflict": 1}
476
def __init__(self, working_tree, this_tree, base_tree, other_tree,
477
interesting_ids=None, reprocess=False, show_base=False,
478
pb=DummyProgress(), pp=None, change_reporter=None,
479
interesting_files=None, do_merge=True,
481
"""Initialize the merger object and perform the merge.
483
:param working_tree: The working tree to apply the merge to
484
:param this_tree: The local tree in the merge operation
485
:param base_tree: The common tree in the merge operation
486
:param other_tree: The other other tree to merge changes from
487
:param interesting_ids: The file_ids of files that should be
488
participate in the merge. May not be combined with
490
:param: reprocess If True, perform conflict-reduction processing.
491
:param show_base: If True, show the base revision in text conflicts.
492
(incompatible with reprocess)
493
:param pb: A Progress bar
494
:param pp: A ProgressPhase object
495
:param change_reporter: An object that should report changes made
496
:param interesting_files: The tree-relative paths of files that should
497
participate in the merge. If these paths refer to directories,
498
the contents of those directories will also be included. May not
499
be combined with interesting_ids. If neither interesting_files nor
500
interesting_ids is specified, all files may participate in the
503
object.__init__(self)
504
if interesting_files is not None and interesting_ids is not None:
506
'specify either interesting_ids or interesting_files')
507
self.interesting_ids = interesting_ids
508
self.interesting_files = interesting_files
509
self.this_tree = working_tree
510
self.base_tree = base_tree
511
self.other_tree = other_tree
512
self._raw_conflicts = []
513
self.cooked_conflicts = []
514
self.reprocess = reprocess
515
self.show_base = show_base
518
self.change_reporter = change_reporter
519
self.cherrypick = cherrypick
521
self.pp = ProgressPhase("Merge phase", 3, self.pb)
526
self.this_tree.lock_tree_write()
527
self.base_tree.lock_read()
528
self.other_tree.lock_read()
529
self.tt = TreeTransform(self.this_tree, self.pb)
532
self._compute_transform()
534
results = self.tt.apply(no_conflicts=True)
535
self.write_modified(results)
537
self.this_tree.add_conflicts(self.cooked_conflicts)
538
except UnsupportedOperation:
542
self.other_tree.unlock()
543
self.base_tree.unlock()
544
self.this_tree.unlock()
547
def make_preview_transform(self):
548
self.base_tree.lock_read()
549
self.other_tree.lock_read()
550
self.tt = TransformPreview(self.this_tree)
553
self._compute_transform()
556
self.other_tree.unlock()
557
self.base_tree.unlock()
561
def _compute_transform(self):
562
entries = self._entries3()
563
child_pb = ui.ui_factory.nested_progress_bar()
565
for num, (file_id, changed, parents3, names3,
566
executable3) in enumerate(entries):
567
child_pb.update('Preparing file merge', num, len(entries))
568
self._merge_names(file_id, parents3, names3)
570
file_status = self.merge_contents(file_id)
572
file_status = 'unmodified'
573
self._merge_executable(file_id,
574
executable3, file_status)
579
child_pb = ui.ui_factory.nested_progress_bar()
581
fs_conflicts = resolve_conflicts(self.tt, child_pb,
582
lambda t, c: conflict_pass(t, c, self.other_tree))
585
if self.change_reporter is not None:
586
from bzrlib import delta
587
delta.report_changes(
588
self.tt.iter_changes(), self.change_reporter)
589
self.cook_conflicts(fs_conflicts)
590
for conflict in self.cooked_conflicts:
594
"""Gather data about files modified between three trees.
596
Return a list of tuples of file_id, changed, parents3, names3,
597
executable3. changed is a boolean indicating whether the file contents
598
or kind were changed. parents3 is a tuple of parent ids for base,
599
other and this. names3 is a tuple of names for base, other and this.
600
executable3 is a tuple of execute-bit values for base, other and this.
603
iterator = self.other_tree.iter_changes(self.base_tree,
604
include_unchanged=True, specific_files=self.interesting_files,
605
extra_trees=[self.this_tree])
606
this_entries = dict((e.file_id, e) for p, e in
607
self.this_tree.iter_entries_by_dir(
608
self.interesting_ids))
609
for (file_id, paths, changed, versioned, parents, names, kind,
610
executable) in iterator:
611
if (self.interesting_ids is not None and
612
file_id not in self.interesting_ids):
614
entry = this_entries.get(file_id)
615
if entry is not None:
616
this_name = entry.name
617
this_parent = entry.parent_id
618
this_executable = entry.executable
622
this_executable = None
623
parents3 = parents + (this_parent,)
624
names3 = names + (this_name,)
625
executable3 = executable + (this_executable,)
626
result.append((file_id, changed, parents3, names3, executable3))
631
self.tt.final_kind(self.tt.root)
633
self.tt.cancel_deletion(self.tt.root)
634
if self.tt.final_file_id(self.tt.root) is None:
635
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
637
if self.other_tree.inventory.root is None:
639
other_root_file_id = self.other_tree.get_root_id()
640
other_root = self.tt.trans_id_file_id(other_root_file_id)
641
if other_root == self.tt.root:
644
self.tt.final_kind(other_root)
647
if self.other_tree.inventory.root.file_id in self.this_tree.inventory:
648
# the other tree's root is a non-root in the current tree
650
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
651
self.tt.cancel_creation(other_root)
652
self.tt.cancel_versioning(other_root)
654
def reparent_children(self, ie, target):
    """Move every child of inventory entry ie under the target trans-id.

    :param ie: An inventory entry whose children should be re-parented.
    :param target: The trans-id of the new parent.
    """
    # .values() instead of the py2-only .iteritems(): the dictionary key
    # (the child's name) was bound but never used.
    for child in ie.children.values():
        trans_id = self.tt.trans_id_file_id(child.file_id)
        self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
659
def write_modified(self, results):
661
for path in results.modified_paths:
662
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
665
hash = self.this_tree.get_file_sha1(file_id)
668
modified_hashes[file_id] = hash
669
self.this_tree.set_merge_modified(modified_hashes)
672
def parent(entry, file_id):
673
"""Determine the parent for a file_id (used as a key method)"""
676
return entry.parent_id
679
def name(entry, file_id):
680
"""Determine the name for a file_id (used as a key method)"""
686
def contents_sha1(tree, file_id):
687
"""Determine the sha1 of the file contents (used as a key method)."""
688
if file_id not in tree:
690
return tree.get_file_sha1(file_id)
693
def executable(tree, file_id):
694
"""Determine the executability of a file-id (used as a key method)."""
695
if file_id not in tree:
697
if tree.kind(file_id) != "file":
699
return tree.is_executable(file_id)
702
def kind(tree, file_id):
703
"""Determine the kind of a file-id (used as a key method)."""
704
if file_id not in tree:
706
return tree.kind(file_id)
709
def _three_way(base, other, this):
710
#if base == other, either they all agree, or only THIS has changed.
713
elif this not in (base, other):
715
# "Ambiguous clean merge" -- both sides have made the same change.
718
# this == base: only other has changed.
723
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
724
"""Do a three-way test on a scalar.
725
Return "this", "other" or "conflict", depending whether a value wins.
727
key_base = key(base_tree, file_id)
728
key_other = key(other_tree, file_id)
729
#if base == other, either they all agree, or only THIS has changed.
730
if key_base == key_other:
732
key_this = key(this_tree, file_id)
733
# "Ambiguous clean merge"
734
if key_this == key_other:
736
elif key_this == key_base:
741
def merge_names(self, file_id):
743
if file_id in tree.inventory:
744
return tree.inventory[file_id]
747
this_entry = get_entry(self.this_tree)
748
other_entry = get_entry(self.other_tree)
749
base_entry = get_entry(self.base_tree)
750
entries = (base_entry, other_entry, this_entry)
753
for entry in entries:
758
names.append(entry.name)
759
parents.append(entry.parent_id)
760
return self._merge_names(file_id, parents, names)
762
def _merge_names(self, file_id, parents, names):
763
"""Perform a merge on file_id names and parents"""
764
base_name, other_name, this_name = names
765
base_parent, other_parent, this_parent = parents
767
name_winner = self._three_way(*names)
769
parent_id_winner = self._three_way(*parents)
770
if this_name is None:
771
if name_winner == "this":
772
name_winner = "other"
773
if parent_id_winner == "this":
774
parent_id_winner = "other"
775
if name_winner == "this" and parent_id_winner == "this":
777
if name_winner == "conflict":
778
trans_id = self.tt.trans_id_file_id(file_id)
779
self._raw_conflicts.append(('name conflict', trans_id,
780
this_name, other_name))
781
if parent_id_winner == "conflict":
782
trans_id = self.tt.trans_id_file_id(file_id)
783
self._raw_conflicts.append(('parent conflict', trans_id,
784
this_parent, other_parent))
785
if other_name is None:
786
# it doesn't matter whether the result was 'other' or
787
# 'conflict'-- if there's no 'other', we leave it alone.
789
# if we get here, name_winner and parent_winner are set to safe values.
790
trans_id = self.tt.trans_id_file_id(file_id)
791
parent_id = parents[self.winner_idx[parent_id_winner]]
792
if parent_id is not None:
793
parent_trans_id = self.tt.trans_id_file_id(parent_id)
794
self.tt.adjust_path(names[self.winner_idx[name_winner]],
795
parent_trans_id, trans_id)
797
def merge_contents(self, file_id):
798
"""Performa a merge on file_id contents."""
799
def contents_pair(tree):
800
if file_id not in tree:
802
kind = tree.kind(file_id)
804
contents = tree.get_file_sha1(file_id)
805
elif kind == "symlink":
806
contents = tree.get_symlink_target(file_id)
809
return kind, contents
811
def contents_conflict():
812
trans_id = self.tt.trans_id_file_id(file_id)
813
name = self.tt.final_name(trans_id)
814
parent_id = self.tt.final_parent(trans_id)
815
if file_id in self.this_tree.inventory:
816
self.tt.unversion_file(trans_id)
817
if file_id in self.this_tree:
818
self.tt.delete_contents(trans_id)
819
file_group = self._dump_conflicts(name, parent_id, file_id,
821
self._raw_conflicts.append(('contents conflict', file_group))
823
# See SPOT run. run, SPOT, run.
824
# So we're not QUITE repeating ourselves; we do tricky things with
826
base_pair = contents_pair(self.base_tree)
827
other_pair = contents_pair(self.other_tree)
828
if base_pair == other_pair:
829
# OTHER introduced no changes
831
this_pair = contents_pair(self.this_tree)
832
if this_pair == other_pair:
833
# THIS and OTHER introduced the same changes
836
trans_id = self.tt.trans_id_file_id(file_id)
837
if this_pair == base_pair:
838
# only OTHER introduced changes
839
if file_id in self.this_tree:
840
# Remove any existing contents
841
self.tt.delete_contents(trans_id)
842
if file_id in self.other_tree:
843
# OTHER changed the file
844
create_by_entry(self.tt,
845
self.other_tree.inventory[file_id],
846
self.other_tree, trans_id)
847
if file_id not in self.this_tree:
848
self.tt.version_file(file_id, trans_id)
850
elif file_id in self.this_tree.inventory:
851
# OTHER deleted the file
852
self.tt.unversion_file(trans_id)
854
#BOTH THIS and OTHER introduced changes; scalar conflict
855
elif this_pair[0] == "file" and other_pair[0] == "file":
856
# THIS and OTHER are both files, so text merge. Either
857
# BASE is a file, or both converted to files, so at least we
858
# have agreement that output should be a file.
860
self.text_merge(file_id, trans_id)
862
return contents_conflict()
863
if file_id not in self.this_tree:
864
self.tt.version_file(file_id, trans_id)
866
self.tt.tree_kind(trans_id)
867
self.tt.delete_contents(trans_id)
872
# Scalar conflict, can't text merge. Dump conflicts
873
return contents_conflict()
875
def get_lines(self, tree, file_id):
876
"""Return the lines in a file, or an empty list."""
878
return tree.get_file(file_id).readlines()
882
def text_merge(self, file_id, trans_id):
883
"""Perform a three-way text merge on a file_id"""
884
# it's possible that we got here with base as a different type.
885
# if so, we just want two-way text conflicts.
886
if file_id in self.base_tree and \
887
self.base_tree.kind(file_id) == "file":
888
base_lines = self.get_lines(self.base_tree, file_id)
891
other_lines = self.get_lines(self.other_tree, file_id)
892
this_lines = self.get_lines(self.this_tree, file_id)
893
m3 = Merge3(base_lines, this_lines, other_lines,
894
is_cherrypick=self.cherrypick)
895
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
896
if self.show_base is True:
897
base_marker = '|' * 7
901
def iter_merge3(retval):
902
retval["text_conflicts"] = False
903
for line in m3.merge_lines(name_a = "TREE",
904
name_b = "MERGE-SOURCE",
905
name_base = "BASE-REVISION",
906
start_marker=start_marker,
907
base_marker=base_marker,
908
reprocess=self.reprocess):
909
if line.startswith(start_marker):
910
retval["text_conflicts"] = True
911
yield line.replace(start_marker, '<' * 7)
915
merge3_iterator = iter_merge3(retval)
916
self.tt.create_file(merge3_iterator, trans_id)
917
if retval["text_conflicts"] is True:
918
self._raw_conflicts.append(('text conflict', trans_id))
919
name = self.tt.final_name(trans_id)
920
parent_id = self.tt.final_parent(trans_id)
921
file_group = self._dump_conflicts(name, parent_id, file_id,
922
this_lines, base_lines,
924
file_group.append(trans_id)
926
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
927
base_lines=None, other_lines=None, set_version=False,
929
"""Emit conflict files.
930
If this_lines, base_lines, or other_lines are omitted, they will be
931
determined automatically. If set_version is true, the .OTHER, .THIS
932
or .BASE (in that order) will be created as versioned files.
934
data = [('OTHER', self.other_tree, other_lines),
935
('THIS', self.this_tree, this_lines)]
937
data.append(('BASE', self.base_tree, base_lines))
940
for suffix, tree, lines in data:
942
trans_id = self._conflict_file(name, parent_id, tree, file_id,
944
file_group.append(trans_id)
945
if set_version and not versioned:
946
self.tt.version_file(file_id, trans_id)
950
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
952
"""Emit a single conflict file."""
953
name = name + '.' + suffix
954
trans_id = self.tt.create_path(name, parent_id)
955
entry = tree.inventory[file_id]
956
create_by_entry(self.tt, entry, tree, trans_id, lines)
959
def merge_executable(self, file_id, file_status):
    """Perform a merge on the execute bit.

    Collects the execute-bit value of file_id in BASE, OTHER and THIS
    (in that order) and delegates the actual merge to _merge_executable.
    """
    executable = [self.executable(t, file_id) for t in (self.base_tree,
                                                        self.other_tree,
                                                        self.this_tree)]
    self._merge_executable(file_id, executable, file_status)
965
def _merge_executable(self, file_id, executable, file_status):
966
"""Perform a merge on the execute bit."""
967
base_executable, other_executable, this_executable = executable
968
if file_status == "deleted":
970
winner = self._three_way(*executable)
971
if winner == "conflict":
972
# There must be a None in here, if we have a conflict, but we
973
# need executability since file status was not deleted.
974
if self.executable(self.other_tree, file_id) is None:
978
if winner == 'this' and file_status != "modified":
980
trans_id = self.tt.trans_id_file_id(file_id)
982
if self.tt.final_kind(trans_id) != "file":
987
executability = this_executable
989
if file_id in self.other_tree:
990
executability = other_executable
991
elif file_id in self.this_tree:
992
executability = this_executable
993
elif file_id in self.base_tree:
994
executability = base_executable
995
if executability is not None:
996
trans_id = self.tt.trans_id_file_id(file_id)
997
self.tt.set_executability(executability, trans_id)
999
def cook_conflicts(self, fs_conflicts):
1000
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1001
from conflicts import Conflict
1003
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
1004
fp = FinalPaths(self.tt)
1005
for conflict in self._raw_conflicts:
1006
conflict_type = conflict[0]
1007
if conflict_type in ('name conflict', 'parent conflict'):
1008
trans_id = conflict[1]
1009
conflict_args = conflict[2:]
1010
if trans_id not in name_conflicts:
1011
name_conflicts[trans_id] = {}
1012
unique_add(name_conflicts[trans_id], conflict_type,
1014
if conflict_type == 'contents conflict':
1015
for trans_id in conflict[1]:
1016
file_id = self.tt.final_file_id(trans_id)
1017
if file_id is not None:
1019
path = fp.get_path(trans_id)
1020
for suffix in ('.BASE', '.THIS', '.OTHER'):
1021
if path.endswith(suffix):
1022
path = path[:-len(suffix)]
1024
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1025
self.cooked_conflicts.append(c)
1026
if conflict_type == 'text conflict':
1027
trans_id = conflict[1]
1028
path = fp.get_path(trans_id)
1029
file_id = self.tt.final_file_id(trans_id)
1030
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1031
self.cooked_conflicts.append(c)
1033
for trans_id, conflicts in name_conflicts.iteritems():
1035
this_parent, other_parent = conflicts['parent conflict']
1036
if this_parent == other_parent:
1037
raise AssertionError()
1039
this_parent = other_parent = \
1040
self.tt.final_file_id(self.tt.final_parent(trans_id))
1042
this_name, other_name = conflicts['name conflict']
1043
if this_name == other_name:
1044
raise AssertionError()
1046
this_name = other_name = self.tt.final_name(trans_id)
1047
other_path = fp.get_path(trans_id)
1048
if this_parent is not None and this_name is not None:
1049
this_parent_path = \
1050
fp.get_path(self.tt.trans_id_file_id(this_parent))
1051
this_path = pathjoin(this_parent_path, this_name)
1053
this_path = "<deleted>"
1054
file_id = self.tt.final_file_id(trans_id)
1055
c = Conflict.factory('path conflict', path=this_path,
1056
conflict_path=other_path, file_id=file_id)
1057
self.cooked_conflicts.append(c)
1058
self.cooked_conflicts.sort(key=Conflict.sort_key)
1061
class WeaveMerger(Merge3Merger):
    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False
    supports_reverse_cherrypick = False
    history_based = True

    def _merged_lines(self, file_id):
        """Generate the merged lines.

        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.

        :param file_id: The file to produce merged lines for.
        :return: (lines, conflicts) as returned by PlanWeaveMerge.merge_lines.
        """
        # NOTE(review): assumes the Merger machinery sets self.cherrypick —
        # when cherrypicking, the base tree constrains the merge plan.
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
                                              base=base)
        if 'merge' in debug.debug_flags:
            # Materialize the plan so it can both be dumped and merged.
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)

    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.

        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        """
        lines, conflicts = self._merged_lines(file_id)
        lines = list(lines)
        # Note we're checking whether the OUTPUT is binary in this case,
        # because we don't want to get into weave merge guts.
        check_text_lines(lines)
        self.tt.create_file(lines, trans_id)
        if conflicts:
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              this_lines=lines)
            file_group.append(trans_id)
1109
class LCAMerger(WeaveMerger):
    """Weave-style merger that plans merges against LCAs only."""

    def _merged_lines(self, file_id):
        """Generate the merged lines.

        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.

        Same contract as WeaveMerger._merged_lines, but the plan is built
        by plan_file_lca_merge.
        """
        # NOTE(review): assumes self.cherrypick is set by the Merger — TODO
        # confirm against the class that constructs this merger.
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
                                                  base=base)
        if 'merge' in debug.debug_flags:
            # Materialize the plan so it can both be dumped and merged.
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)
1133
class Diff3Merger(Merge3Merger):
    """Three-way merger using external diff3 for text merging"""

    def dump_file(self, temp_dir, name, tree, file_id):
        """Write tree's text of file_id to temp_dir/name; return the path."""
        out_path = pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        try:
            in_file = tree.get_file(file_id)
            for line in in_file:
                out_file.write(line)
        finally:
            out_file.close()
        return out_path

    def text_merge(self, file_id, trans_id):
        """Perform a diff3 merge using a specified file-id and trans-id.

        If conflicts are encountered, .BASE, .THIS. and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
        temp_dir = osutils.mkdtemp(prefix="bzr-")
        try:
            new_file = pathjoin(temp_dir, "new")
            this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
            base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
            other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
            status = bzrlib.patch.diff3(new_file, this, base, other)
            # diff3 convention: 0 = clean merge, 1 = conflicts, 2 = trouble.
            if status not in (0, 1):
                raise BzrError("Unhandled diff3 exit code")
            f = open(new_file, 'rb')
            try:
                self.tt.create_file(f, trans_id)
            finally:
                f.close()
            if status == 1:
                name = self.tt.final_name(trans_id)
                parent_id = self.tt.final_parent(trans_id)
                self._dump_conflicts(name, parent_id, file_id)
                self._raw_conflicts.append(('text conflict', trans_id))
        finally:
            osutils.rmtree(temp_dir)
1176
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                interesting_ids=None,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                pb=DummyProgress(),
                change_reporter=None):
    """Primary interface for merging.

    typical use is probably
    'merge_inner(branch, branch.get_revision_tree(other_revision),
                 branch.get_revision_tree(base_revision))'

    :raises BzrError: if this_tree is not supplied.
    :raises ValueError: if both interesting_ids and interesting_files are
        supplied.
    """
    if this_tree is None:
        raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
                       "parameter as of bzrlib version 0.8.")
    merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
                    pb=pb, change_reporter=change_reporter)
    merger.backup_files = backup_files
    merger.merge_type = merge_type
    merger.interesting_ids = interesting_ids
    merger.ignore_zero = ignore_zero
    if interesting_files:
        if interesting_ids:
            raise ValueError('Only supply interesting_ids'
                             ' or interesting_files')
    merger.interesting_files = interesting_files
    merger.show_base = show_base
    merger.reprocess = reprocess
    merger.other_rev_id = other_rev_id
    merger.other_basis = other_rev_id
    # Trees expose their revision either as get_revision_id (revision trees)
    # or last_revision (working trees).
    get_revision_id = getattr(base_tree, 'get_revision_id', None)
    if get_revision_id is None:
        get_revision_id = base_tree.last_revision
    merger.set_base_revision(get_revision_id(), this_branch)
    return merger.do_merge()
1217
def get_merge_type_registry():
    """Merge type registry is in bzrlib.option to avoid circular imports.

    This method provides a sanctioned way to retrieve it.
    """
    # Local import breaks the bzrlib.merge <-> bzrlib.option import cycle.
    from bzrlib import option
    return option._merge_type_registry
1226
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1227
def status_a(revision, text):
1228
if revision in ancestors_b:
1229
return 'killed-b', text
1231
return 'new-a', text
1233
def status_b(revision, text):
1234
if revision in ancestors_a:
1235
return 'killed-a', text
1237
return 'new-b', text
1239
plain_a = [t for (a, t) in annotated_a]
1240
plain_b = [t for (a, t) in annotated_b]
1241
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1242
blocks = matcher.get_matching_blocks()
1245
for ai, bi, l in blocks:
1246
# process all mismatched sections
1247
# (last mismatched section is handled because blocks always
1248
# includes a 0-length last block)
1249
for revision, text in annotated_a[a_cur:ai]:
1250
yield status_a(revision, text)
1251
for revision, text in annotated_b[b_cur:bi]:
1252
yield status_b(revision, text)
1253
# and now the matched section
1256
for text_a in plain_a[ai:a_cur]:
1257
yield "unchanged", text_a
1260
class _PlanMergeBase(object):
1262
def __init__(self, a_rev, b_rev, vf, key_prefix):
1265
:param a_rev: Revision-id of one revision to merge
1266
:param b_rev: Revision-id of the other revision to merge
1267
:param vf: A VersionedFiles containing both revisions
1268
:param key_prefix: A prefix for accessing keys in vf, typically
1274
self._last_lines = None
1275
self._last_lines_revision_id = None
1276
self._cached_matching_blocks = {}
1277
self._key_prefix = key_prefix
1278
self._precache_tip_lines()
1280
def _precache_tip_lines(self):
1281
lines = self.get_lines([self.a_rev, self.b_rev])
1282
self.lines_a = lines[self.a_rev]
1283
self.lines_b = lines[self.b_rev]
1285
def get_lines(self, revisions):
1286
"""Get lines for revisions from the backing VersionedFiles.
1288
:raises RevisionNotPresent: on absent texts.
1290
keys = [(self._key_prefix + (rev,)) for rev in revisions]
1292
for record in self.vf.get_record_stream(keys, 'unordered', True):
1293
if record.storage_kind == 'absent':
1294
raise errors.RevisionNotPresent(record.key, self.vf)
1295
result[record.key[-1]] = osutils.split_lines(
1296
record.get_bytes_as('fulltext'))
1299
def plan_merge(self):
1300
"""Generate a 'plan' for merging the two revisions.
1302
This involves comparing their texts and determining the cause of
1303
differences. If text A has a line and text B does not, then either the
1304
line was added to text A, or it was deleted from B. Once the causes
1305
are combined, they are written out in the format described in
1306
VersionedFile.plan_merge
1308
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
1309
unique_a, unique_b = self._unique_lines(blocks)
1310
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
1311
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
1312
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
1314
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
1317
for i, j, n in blocks:
1318
for a_index in range(last_i, i):
1319
if a_index in new_a:
1320
if a_index in killed_b:
1321
yield 'conflicted-a', self.lines_a[a_index]
1323
yield 'new-a', self.lines_a[a_index]
1325
yield 'killed-b', self.lines_a[a_index]
1326
for b_index in range(last_j, j):
1327
if b_index in new_b:
1328
if b_index in killed_a:
1329
yield 'conflicted-b', self.lines_b[b_index]
1331
yield 'new-b', self.lines_b[b_index]
1333
yield 'killed-a', self.lines_b[b_index]
1334
# handle common lines
1335
for a_index in range(i, i+n):
1336
yield 'unchanged', self.lines_a[a_index]
1340
def _get_matching_blocks(self, left_revision, right_revision):
1341
"""Return a description of which sections of two revisions match.
1343
See SequenceMatcher.get_matching_blocks
1345
cached = self._cached_matching_blocks.get((left_revision,
1347
if cached is not None:
1349
if self._last_lines_revision_id == left_revision:
1350
left_lines = self._last_lines
1351
right_lines = self.get_lines([right_revision])[right_revision]
1353
lines = self.get_lines([left_revision, right_revision])
1354
left_lines = lines[left_revision]
1355
right_lines = lines[right_revision]
1356
self._last_lines = right_lines
1357
self._last_lines_revision_id = right_revision
1358
matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
1360
return matcher.get_matching_blocks()
1362
def _unique_lines(self, matching_blocks):
1363
"""Analyse matching_blocks to determine which lines are unique
1365
:return: a tuple of (unique_left, unique_right), where the values are
1366
sets of line numbers of unique lines.
1372
for i, j, n in matching_blocks:
1373
unique_left.extend(range(last_i, i))
1374
unique_right.extend(range(last_j, j))
1377
return unique_left, unique_right
1380
def _subtract_plans(old_plan, new_plan):
1381
"""Remove changes from new_plan that came from old_plan.
1383
It is assumed that the difference between the old_plan and new_plan
1384
is their choice of 'b' text.
1386
All lines from new_plan that differ from old_plan are emitted
1387
verbatim. All lines from new_plan that match old_plan but are
1388
not about the 'b' revision are emitted verbatim.
1390
Lines that match and are about the 'b' revision are the lines we
1391
don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
1392
is skipped entirely.
1394
matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
1397
for i, j, n in matcher.get_matching_blocks():
1398
for jj in range(last_j, j):
1400
for jj in range(j, j+n):
1401
plan_line = new_plan[jj]
1402
if plan_line[0] == 'new-b':
1404
elif plan_line[0] == 'killed-b':
1405
yield 'unchanged', plan_line[1]
1411
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # simplicity.
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* effect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       of tips, internally it supports it, but it isn't a
        #       "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)

    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will effect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                # The fake tip node is not a real text; skip it.
                continue
        # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)
1650
class _PlanLCAMerge(_PlanMergeBase):
1652
This merge algorithm differs from _PlanMerge in that:
1653
1. comparisons are done against LCAs only
1654
2. cases where a contested line is new versus one LCA but old versus
1655
another are marked as conflicts, by emitting the line as conflicted-a
1658
This is faster, and hopefully produces more useful output.
1661
def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
1662
_PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
1663
lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
1666
if lca == NULL_REVISION:
1669
self.lcas.add(lca[-1])
1670
for lca in self.lcas:
1671
if _mod_revision.is_null(lca):
1674
lca_lines = self.get_lines([lca])[lca]
1675
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
1677
blocks = list(matcher.get_matching_blocks())
1678
self._cached_matching_blocks[(a_rev, lca)] = blocks
1679
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
1681
blocks = list(matcher.get_matching_blocks())
1682
self._cached_matching_blocks[(b_rev, lca)] = blocks
1684
def _determine_status(self, revision_id, unique_line_numbers):
1685
"""Determines the status unique lines versus all lcas.
1687
Basically, determines why the line is unique to this revision.
1689
A line may be determined new, killed, or both.
1691
If a line is determined new, that means it was not present in at least
1692
one LCA, and is not present in the other merge revision.
1694
If a line is determined killed, that means the line was present in
1697
If a line is killed and new, this indicates that the two merge
1698
revisions contain differing conflict resolutions.
1699
:param revision_id: The id of the revision in which the lines are
1701
:param unique_line_numbers: The line numbers of unique lines.
1702
:return a tuple of (new_this, killed_other):
1706
unique_line_numbers = set(unique_line_numbers)
1707
for lca in self.lcas:
1708
blocks = self._get_matching_blocks(revision_id, lca)
1709
unique_vs_lca, _ignored = self._unique_lines(blocks)
1710
new.update(unique_line_numbers.intersection(unique_vs_lca))
1711
killed.update(unique_line_numbers.difference(unique_vs_lca))