# Copyright (C) 2005, 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19
from itertools import chain
30
revision as _mod_revision,
34
from bzrlib.branch import Branch
35
from bzrlib.conflicts import ConflictList, Conflict
36
from bzrlib.errors import (BzrCommandError,
46
WorkingTreeNotRevision,
49
from bzrlib.graph import Graph
50
from bzrlib.merge3 import Merge3
51
from bzrlib.osutils import rename, pathjoin
52
from bzrlib.progress import DummyProgress, ProgressPhase
53
from bzrlib.revision import (NULL_REVISION, ensure_null)
54
from bzrlib.textfile import check_text_lines
55
from bzrlib.trace import mutter, warning, note, is_quiet
56
from bzrlib.transform import (TransformPreview, TreeTransform,
57
resolve_conflicts, cook_conflicts,
58
conflict_pass, FinalPaths, create_by_entry,
59
unique_add, ROOT_PARENT)
60
from bzrlib.versionedfile import PlanWeaveMerge
63
# TODO: Report back as changes are merged in
66
def transform_tree(from_tree, to_tree, interesting_ids=None):
67
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
68
interesting_ids=interesting_ids, this_tree=from_tree)
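# A minimal usage sketch (hypothetical tree objects, for illustration only):
# with `wt_a` and `wt_b` being two working trees, transform_tree rewrites
# wt_a's contents to match wt_b by running a three-way merge in which wt_a is
# both THIS and BASE and wt_b is OTHER, so only the wt_b-vs-wt_a differences
# are applied:
#
#   transform_tree(wt_a, wt_b)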
72
def __init__(self, this_branch, other_tree=None, base_tree=None,
73
this_tree=None, pb=DummyProgress(), change_reporter=None,
74
recurse='down', revision_graph=None):
76
self.this_branch = this_branch
77
self.this_basis = _mod_revision.ensure_null(
78
this_branch.last_revision())
79
self.this_rev_id = None
80
self.this_tree = this_tree
81
self.this_revision_tree = None
82
self.this_basis_tree = None
83
self.other_tree = other_tree
84
self.other_branch = None
85
self.base_tree = base_tree
86
self.ignore_zero = False
87
self.backup_files = False
88
self.interesting_ids = None
89
self.interesting_files = None
90
self.show_base = False
91
self.reprocess = False
94
self.recurse = recurse
95
self.change_reporter = change_reporter
96
self._cached_trees = {}
97
self._revision_graph = revision_graph
98
self._base_is_ancestor = None
99
self._base_is_other_ancestor = None
100
self._is_criss_cross = False
101
self._lca_trees = None
104
def revision_graph(self):
105
if self._revision_graph is None:
106
self._revision_graph = self.this_branch.repository.get_graph()
107
return self._revision_graph
109
def _set_base_is_ancestor(self, value):
110
self._base_is_ancestor = value
112
def _get_base_is_ancestor(self):
113
if self._base_is_ancestor is None:
114
self._base_is_ancestor = self.revision_graph.is_ancestor(
115
self.base_rev_id, self.this_basis)
116
return self._base_is_ancestor
118
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
120
def _set_base_is_other_ancestor(self, value):
121
self._base_is_other_ancestor = value
123
def _get_base_is_other_ancestor(self):
124
if self._base_is_other_ancestor is None:
125
if self.other_basis is None:
127
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
128
self.base_rev_id, self.other_basis)
129
return self._base_is_other_ancestor
131
base_is_other_ancestor = property(_get_base_is_other_ancestor,
132
_set_base_is_other_ancestor)
135
def from_uncommitted(tree, other_tree, pb):
136
"""Return a Merger for uncommitted changes in other_tree.
138
:param tree: The tree to merge into
139
:param other_tree: The tree to get uncommitted changes from
140
:param pb: A progress indicator
142
merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
144
merger.base_rev_id = merger.base_tree.get_revision_id()
145
merger.other_rev_id = None
146
merger.other_basis = merger.base_rev_id
150
def from_mergeable(klass, tree, mergeable, pb):
151
"""Return a Merger for a bundle or merge directive.
153
:param tree: The tree to merge changes into
154
:param mergeable: A merge directive or bundle
155
:param pb: A progress indicator
157
mergeable.install_revisions(tree.branch.repository)
158
base_revision_id, other_revision_id, verified = \
159
mergeable.get_merge_request(tree.branch.repository)
160
revision_graph = tree.branch.repository.get_graph()
161
if base_revision_id is not None:
162
if (base_revision_id != _mod_revision.NULL_REVISION and
163
revision_graph.is_ancestor(
164
base_revision_id, tree.branch.last_revision())):
165
base_revision_id = None
167
warning('Performing cherrypick')
168
merger = klass.from_revision_ids(pb, tree, other_revision_id,
169
base_revision_id, revision_graph=
171
return merger, verified
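# Illustrative sketch of driving from_mergeable (hedged: read_mergeable_from_url
# is assumed to live in bzrlib.bundle as in contemporary bzrlib releases, and
# the paths are hypothetical):
#
#   from bzrlib import workingtree
#   from bzrlib.bundle import read_mergeable_from_url
#
#   wt = workingtree.WorkingTree.open_containing('.')[0]
#   mergeable = read_mergeable_from_url('../feature.directive')
#   merger, verified = Merger.from_mergeable(wt, mergeable, DummyProgress())
#   merger.merge_type = Merge3Merger
#   conflict_count = merger.do_merge()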
174
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
175
base_branch=None, revision_graph=None):
176
"""Return a Merger for revision-ids.
178
:param tree: The tree to merge changes into
179
:param other: The revision-id to use as OTHER
180
:param base: The revision-id to use as BASE. If not specified, will
182
:param other_branch: A branch containing the other revision-id. If
183
not supplied, tree.branch is used.
184
:param base_branch: A branch containing the base revision-id. If
185
not supplied, other_branch or tree.branch will be used.
186
:param revision_graph: If you have a revision_graph precomputed, pass
187
it in, otherwise it will be created for you.
188
:param pb: A progress indicator
190
merger = Merger(tree.branch, this_tree=tree, pb=pb,
191
revision_graph=revision_graph)
192
if other_branch is None:
193
other_branch = tree.branch
194
merger.set_other_revision(other, other_branch)
198
if base_branch is None:
199
base_branch = other_branch
200
merger.set_base_revision(base, base_branch)
203
def revision_tree(self, revision_id, branch=None):
204
if revision_id not in self._cached_trees:
206
branch = self.this_branch
208
tree = self.this_tree.revision_tree(revision_id)
209
except errors.NoSuchRevisionInTree:
210
tree = branch.repository.revision_tree(revision_id)
211
self._cached_trees[revision_id] = tree
212
return self._cached_trees[revision_id]
214
def _get_tree(self, treespec, possible_transports=None):
215
from bzrlib import workingtree
216
location, revno = treespec
218
tree = workingtree.WorkingTree.open_containing(location)[0]
219
return tree.branch, tree
220
branch = Branch.open_containing(location, possible_transports)[0]
222
revision_id = branch.last_revision()
224
revision_id = branch.get_rev_id(revno)
225
revision_id = ensure_null(revision_id)
226
return branch, self.revision_tree(revision_id, branch)
228
def ensure_revision_trees(self):
229
if self.this_revision_tree is None:
230
self.this_basis_tree = self.revision_tree(self.this_basis)
231
if self.this_basis == self.this_rev_id:
232
self.this_revision_tree = self.this_basis_tree
234
if self.other_rev_id is None:
235
other_basis_tree = self.revision_tree(self.other_basis)
236
changes = other_basis_tree.changes_from(self.other_tree)
237
if changes.has_changed():
238
raise WorkingTreeNotRevision(self.this_tree)
239
other_rev_id = self.other_basis
240
self.other_tree = other_basis_tree
242
def file_revisions(self, file_id):
243
self.ensure_revision_trees()
244
def get_id(tree, file_id):
245
revision_id = tree.inventory[file_id].revision
247
if self.this_rev_id is None:
248
if self.this_basis_tree.get_file_sha1(file_id) != \
249
self.this_tree.get_file_sha1(file_id):
250
raise WorkingTreeNotRevision(self.this_tree)
252
trees = (self.this_basis_tree, self.other_tree)
253
return [get_id(tree, file_id) for tree in trees]
255
def check_basis(self, check_clean, require_commits=True):
256
if self.this_basis is None and require_commits is True:
257
raise BzrCommandError("This branch has no commits."
258
" (perhaps you would prefer 'bzr pull')")
261
if self.this_basis != self.this_rev_id:
262
raise errors.UncommittedChanges(self.this_tree)
264
def compare_basis(self):
266
basis_tree = self.revision_tree(self.this_tree.last_revision())
267
except errors.NoSuchRevision:
268
basis_tree = self.this_tree.basis_tree()
269
changes = self.this_tree.changes_from(basis_tree)
270
if not changes.has_changed():
271
self.this_rev_id = self.this_basis
273
def set_interesting_files(self, file_list):
274
self.interesting_files = file_list
276
def set_pending(self):
277
if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
281
def _add_parent(self):
282
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
283
new_parent_trees = []
284
for revision_id in new_parents:
286
tree = self.revision_tree(revision_id)
287
except errors.NoSuchRevision:
291
new_parent_trees.append((revision_id, tree))
293
self.this_tree.set_parent_trees(new_parent_trees,
294
allow_leftmost_as_ghost=True)
296
for _revision_id, tree in new_parent_trees:
300
def set_other(self, other_revision, possible_transports=None):
301
"""Set the revision and tree to merge from.
303
This sets the other_tree, other_rev_id, other_basis attributes.
305
:param other_revision: The [path, revision] list to merge from.
307
self.other_branch, self.other_tree = self._get_tree(other_revision,
309
if other_revision[1] == -1:
310
self.other_rev_id = _mod_revision.ensure_null(
311
self.other_branch.last_revision())
312
if _mod_revision.is_null(self.other_rev_id):
313
raise NoCommits(self.other_branch)
314
self.other_basis = self.other_rev_id
315
elif other_revision[1] is not None:
316
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
317
self.other_basis = self.other_rev_id
319
self.other_rev_id = None
320
self.other_basis = self.other_branch.last_revision()
321
if self.other_basis is None:
322
raise NoCommits(self.other_branch)
323
if self.other_rev_id is not None:
324
self._cached_trees[self.other_rev_id] = self.other_tree
325
self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)
327
def set_other_revision(self, revision_id, other_branch):
328
"""Set 'other' based on a branch and revision id
330
:param revision_id: The revision to use for a tree
331
:param other_branch: The branch containing this tree
333
self.other_rev_id = revision_id
334
self.other_branch = other_branch
335
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
336
self.other_tree = self.revision_tree(revision_id)
337
self.other_basis = revision_id
339
def set_base_revision(self, revision_id, branch):
340
"""Set 'base' based on a branch and revision id
342
:param revision_id: The revision to use for a tree
343
:param branch: The branch containing this tree
345
self.base_rev_id = revision_id
346
self.base_branch = branch
347
self._maybe_fetch(branch, self.this_branch, revision_id)
348
self.base_tree = self.revision_tree(revision_id)
350
def _maybe_fetch(self, source, target, revision_id):
351
if not source.repository.has_same_location(target.repository):
352
target.fetch(source, revision_id)
355
revisions = [ensure_null(self.this_basis),
356
ensure_null(self.other_basis)]
357
if NULL_REVISION in revisions:
358
self.base_rev_id = NULL_REVISION
360
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
362
self.base_rev_id = NULL_REVISION
364
self.base_rev_id = list(lcas)[0]
365
else: # len(lcas) > 1
366
self.base_rev_id = self.revision_graph.find_unique_lca(
368
self._is_criss_cross = True
369
if self.base_rev_id == NULL_REVISION:
370
raise UnrelatedBranches()
371
if self._is_criss_cross:
372
warning('Warning: criss-cross merge encountered. See bzr'
373
' help criss-cross.')
374
interesting_revision_ids = [self.base_rev_id]
375
interesting_revision_ids.extend(lcas)
376
interesting_trees = dict((t.get_revision_id(), t)
377
for t in self.this_branch.repository.revision_trees(
378
interesting_revision_ids))
379
self._cached_trees.update(interesting_trees)
380
self.base_tree = interesting_trees.pop(self.base_rev_id)
381
self._lca_trees = interesting_trees
383
self.base_tree = self.revision_tree(self.base_rev_id)
384
self.base_is_ancestor = True
385
self.base_is_other_ancestor = True
387
def set_base(self, base_revision):
388
"""Set the base revision to use for the merge.
390
:param base_revision: A 2-list containing a path and revision number.
392
mutter("doing merge() with no base_revision specified")
393
if base_revision == [None, None]:
396
base_branch, self.base_tree = self._get_tree(base_revision)
397
if base_revision[1] == -1:
398
self.base_rev_id = base_branch.last_revision()
399
elif base_revision[1] is None:
400
self.base_rev_id = _mod_revision.NULL_REVISION
402
self.base_rev_id = _mod_revision.ensure_null(
403
base_branch.get_rev_id(base_revision[1]))
404
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
406
def make_merger(self):
407
kwargs = {'working_tree': self.this_tree, 'this_tree': self.this_tree,
408
'other_tree': self.other_tree,
409
'interesting_ids': self.interesting_ids,
410
'interesting_files': self.interesting_files,
413
if self.merge_type.requires_base:
414
kwargs['base_tree'] = self.base_tree
415
if self.merge_type.supports_reprocess:
416
kwargs['reprocess'] = self.reprocess
418
raise BzrError("Conflict reduction is not supported for merge"
419
" type %s." % self.merge_type)
420
if self.merge_type.supports_show_base:
421
kwargs['show_base'] = self.show_base
423
raise BzrError("Showing base is not supported for this"
424
" merge type. %s" % self.merge_type)
425
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
426
and not self.base_is_other_ancestor):
427
raise errors.CannotReverseCherrypick()
428
if self.merge_type.supports_cherrypick:
429
kwargs['cherrypick'] = (not self.base_is_ancestor or
430
not self.base_is_other_ancestor)
431
if self._is_criss_cross and getattr(self.merge_type,
432
'supports_lca_trees', False):
433
kwargs['lca_trees'] = self._lca_trees
434
return self.merge_type(pb=self._pb,
435
change_reporter=self.change_reporter,
439
self.this_tree.lock_tree_write()
440
if self.base_tree is not None:
441
self.base_tree.lock_read()
442
if self.other_tree is not None:
443
self.other_tree.lock_read()
445
merge = self.make_merger()
447
if self.recurse == 'down':
448
for relpath, file_id in self.this_tree.iter_references():
449
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
450
other_revision = self.other_tree.get_reference_revision(
452
if other_revision == sub_tree.last_revision():
454
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
455
sub_merge.merge_type = self.merge_type
456
other_branch = self.other_branch.reference_parent(file_id, relpath)
457
sub_merge.set_other_revision(other_revision, other_branch)
458
base_revision = self.base_tree.get_reference_revision(file_id)
459
sub_merge.base_tree = \
460
sub_tree.branch.repository.revision_tree(base_revision)
461
sub_merge.base_rev_id = base_revision
465
if self.other_tree is not None:
466
self.other_tree.unlock()
467
if self.base_tree is not None:
468
self.base_tree.unlock()
469
self.this_tree.unlock()
470
if len(merge.cooked_conflicts) == 0:
471
if not self.ignore_zero and not is_quiet():
472
note("All changes applied successfully.")
474
note("%d conflicts encountered." % len(merge.cooked_conflicts))
476
return len(merge.cooked_conflicts)
479
class Merge3Merger(object):
480
"""Three-way merger that uses the merge3 text merger"""
482
supports_reprocess = True
483
supports_show_base = True
484
history_based = False
485
supports_cherrypick = True
486
supports_reverse_cherrypick = True
487
winner_idx = {"this": 2, "other": 1, "conflict": 1}
488
supports_lca_trees = True
490
def __init__(self, working_tree, this_tree, base_tree, other_tree,
491
interesting_ids=None, reprocess=False, show_base=False,
492
pb=DummyProgress(), pp=None, change_reporter=None,
493
interesting_files=None, do_merge=True,
494
cherrypick=False, lca_trees=None):
495
"""Initialize the merger object and perform the merge.
497
:param working_tree: The working tree to apply the merge to
498
:param this_tree: The local tree in the merge operation
499
:param base_tree: The common tree in the merge operation
500
:param other_tree: The other tree to merge changes from
501
:param interesting_ids: The file_ids of files that should
502
participate in the merge. May not be combined with
504
:param reprocess: If True, perform conflict-reduction processing.
505
:param show_base: If True, show the base revision in text conflicts.
506
(incompatible with reprocess)
507
:param pb: A Progress bar
508
:param pp: A ProgressPhase object
509
:param change_reporter: An object that should report changes made
510
:param interesting_files: The tree-relative paths of files that should
511
participate in the merge. If these paths refer to directories,
512
the contents of those directories will also be included. May not
513
be combined with interesting_ids. If neither interesting_files nor
514
interesting_ids is specified, all files may participate in the
516
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
517
if the ancestry was found to include a criss-cross merge.
518
Otherwise should be None.
520
object.__init__(self)
521
if interesting_files is not None and interesting_ids is not None:
523
'specify either interesting_ids or interesting_files')
524
self.interesting_ids = interesting_ids
525
self.interesting_files = interesting_files
526
self.this_tree = working_tree
527
self.base_tree = base_tree
528
self.other_tree = other_tree
529
self._raw_conflicts = []
530
self.cooked_conflicts = []
531
self.reprocess = reprocess
532
self.show_base = show_base
533
self._lca_trees = lca_trees
536
self.change_reporter = change_reporter
537
self.cherrypick = cherrypick
539
self.pp = ProgressPhase("Merge phase", 3, self.pb)
544
self.this_tree.lock_tree_write()
545
self.base_tree.lock_read()
546
self.other_tree.lock_read()
547
self.tt = TreeTransform(self.this_tree, self.pb)
550
self._compute_transform()
552
results = self.tt.apply(no_conflicts=True)
553
self.write_modified(results)
555
self.this_tree.add_conflicts(self.cooked_conflicts)
556
except UnsupportedOperation:
560
self.other_tree.unlock()
561
self.base_tree.unlock()
562
self.this_tree.unlock()
565
def make_preview_transform(self):
566
self.base_tree.lock_read()
567
self.other_tree.lock_read()
568
self.tt = TransformPreview(self.this_tree)
571
self._compute_transform()
574
self.other_tree.unlock()
575
self.base_tree.unlock()
579
def _compute_transform(self):
580
entries = self._entries3()
581
child_pb = ui.ui_factory.nested_progress_bar()
583
for num, (file_id, changed, parents3, names3,
584
executable3) in enumerate(entries):
585
child_pb.update('Preparing file merge', num, len(entries))
586
self._merge_names(file_id, parents3, names3)
588
file_status = self.merge_contents(file_id)
590
file_status = 'unmodified'
591
self._merge_executable(file_id,
592
executable3, file_status)
597
child_pb = ui.ui_factory.nested_progress_bar()
599
fs_conflicts = resolve_conflicts(self.tt, child_pb,
600
lambda t, c: conflict_pass(t, c, self.other_tree))
603
if self.change_reporter is not None:
604
from bzrlib import delta
605
delta.report_changes(
606
self.tt.iter_changes(), self.change_reporter)
607
self.cook_conflicts(fs_conflicts)
608
for conflict in self.cooked_conflicts:
612
"""Gather data about files modified between three trees.
614
Return a list of tuples of file_id, changed, parents3, names3,
615
executable3. changed is a boolean indicating whether the file contents
616
or kind were changed. parents3 is a tuple of parent ids for base,
617
other and this. names3 is a tuple of names for base, other and this.
618
executable3 is a tuple of execute-bit values for base, other and this.
621
iterator = self.other_tree.iter_changes(self.base_tree,
622
include_unchanged=True, specific_files=self.interesting_files,
623
extra_trees=[self.this_tree])
624
for (file_id, paths, changed, versioned, parents, names, kind,
625
executable) in iterator:
626
if (self.interesting_ids is not None and
627
file_id not in self.interesting_ids):
629
if file_id in self.this_tree.inventory:
630
entry = self.this_tree.inventory[file_id]
631
this_name = entry.name
632
this_parent = entry.parent_id
633
this_executable = entry.executable
637
this_executable = None
638
parents3 = parents + (this_parent,)
639
names3 = names + (this_name,)
640
executable3 = executable + (this_executable,)
641
result.append((file_id, changed, parents3, names3, executable3))
644
def _entries_lca(self):
645
"""Gather data about files modified between multiple trees.
647
This compares OTHER versus all LCA trees, and for interesting entries,
648
it then compares with THIS and BASE.
650
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
651
:return: [(file_id, changed, parents, names, executable)]
652
file_id Simple file_id of the entry
653
changed Boolean, True if the kind or contents changed
655
parents ((base, [parent_id, in, lcas]), parent_id_other,
657
names ((base, [name, in, lcas]), name_in_other, name_in_this)
658
executable ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
661
walker = _mod_tree.MultiWalker(self.other_tree,
662
self._lca_trees.values())
664
for path, file_id, other_ie, lca_values in walker.iter_all():
665
# Is this modified at all from any of the other trees?
666
last_rev = other_ie.revision
667
for lca_path, ie in lca_values:
668
if ie.revision != last_rev:
670
else: # Identical in all trees
672
base_ie = self.base_tree.inventory[file_id]
673
this_ie = self.this_tree.inventory[file_id]
674
result.append((file_id, True,
676
[ie.parent_id for path, ie in lca_values]),
677
other_ie.parent_id, this_ie.parent_id),
679
[ie.name for path, ie in lca_values]),
680
other_ie.name, this_ie.name),
681
((base_ie.executable,
682
[ie.executable for path, ie in lca_values]),
683
other_ie.executable, this_ie.executable)
690
self.tt.final_kind(self.tt.root)
692
self.tt.cancel_deletion(self.tt.root)
693
if self.tt.final_file_id(self.tt.root) is None:
694
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
696
if self.other_tree.inventory.root is None:
698
other_root_file_id = self.other_tree.get_root_id()
699
other_root = self.tt.trans_id_file_id(other_root_file_id)
700
if other_root == self.tt.root:
703
self.tt.final_kind(other_root)
706
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
707
self.tt.cancel_creation(other_root)
708
self.tt.cancel_versioning(other_root)
710
def reparent_children(self, ie, target):
711
for thing, child in ie.children.iteritems():
712
trans_id = self.tt.trans_id_file_id(child.file_id)
713
self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
715
def write_modified(self, results):
717
for path in results.modified_paths:
718
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
721
hash = self.this_tree.get_file_sha1(file_id)
724
modified_hashes[file_id] = hash
725
self.this_tree.set_merge_modified(modified_hashes)
728
def parent(entry, file_id):
729
"""Determine the parent for a file_id (used as a key method)"""
732
return entry.parent_id
735
def name(entry, file_id):
736
"""Determine the name for a file_id (used as a key method)"""
742
def contents_sha1(tree, file_id):
743
"""Determine the sha1 of the file contents (used as a key method)."""
744
if file_id not in tree:
746
return tree.get_file_sha1(file_id)
749
def executable(tree, file_id):
750
"""Determine the executability of a file-id (used as a key method)."""
751
if file_id not in tree:
753
if tree.kind(file_id) != "file":
755
return tree.is_executable(file_id)
758
def kind(tree, file_id):
759
"""Determine the kind of a file-id (used as a key method)."""
760
if file_id not in tree:
762
return tree.kind(file_id)
765
def _three_way(base, other, this):
766
#if base == other, either they all agree, or only THIS has changed.
769
elif this not in (base, other):
771
# "Ambiguous clean merge" -- both sides have made the same change.
774
# this == base: only other has changed.
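# Illustrative outcomes (argument order is base, other, this; derived from
# the branch comments above):
#   _three_way('x', 'x', 'y') -> 'this'      only THIS changed
#   _three_way('x', 'y', 'x') -> 'other'     only OTHER changed
#   _three_way('x', 'y', 'y') -> 'this'      both made the same change
#   _three_way('x', 'y', 'z') -> 'conflict'  both changed, differently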
779
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
780
"""Do a three-way test on a scalar.
781
Return "this", "other" or "conflict", depending on whether a value wins.
783
key_base = key(base_tree, file_id)
784
key_other = key(other_tree, file_id)
785
#if base == other, either they all agree, or only THIS has changed.
786
if key_base == key_other:
788
key_this = key(this_tree, file_id)
789
# "Ambiguous clean merge"
790
if key_this == key_other:
792
elif key_this == key_base:
797
def merge_names(self, file_id):
799
if file_id in tree.inventory:
800
return tree.inventory[file_id]
803
this_entry = get_entry(self.this_tree)
804
other_entry = get_entry(self.other_tree)
805
base_entry = get_entry(self.base_tree)
806
entries = (base_entry, other_entry, this_entry)
809
for entry in entries:
814
names.append(entry.name)
815
parents.append(entry.parent_id)
816
return self._merge_names(file_id, parents, names)
818
def _merge_names(self, file_id, parents, names):
819
"""Perform a merge on file_id names and parents"""
820
base_name, other_name, this_name = names
821
base_parent, other_parent, this_parent = parents
823
name_winner = self._three_way(*names)
825
parent_id_winner = self._three_way(*parents)
826
if this_name is None:
827
if name_winner == "this":
828
name_winner = "other"
829
if parent_id_winner == "this":
830
parent_id_winner = "other"
831
if name_winner == "this" and parent_id_winner == "this":
833
if name_winner == "conflict":
834
trans_id = self.tt.trans_id_file_id(file_id)
835
self._raw_conflicts.append(('name conflict', trans_id,
836
this_name, other_name))
837
if parent_id_winner == "conflict":
838
trans_id = self.tt.trans_id_file_id(file_id)
839
self._raw_conflicts.append(('parent conflict', trans_id,
840
this_parent, other_parent))
841
if other_name is None:
842
# it doesn't matter whether the result was 'other' or
843
# 'conflict'-- if there's no 'other', we leave it alone.
845
# if we get here, name_winner and parent_winner are set to safe values.
846
trans_id = self.tt.trans_id_file_id(file_id)
847
parent_id = parents[self.winner_idx[parent_id_winner]]
848
if parent_id is not None:
849
parent_trans_id = self.tt.trans_id_file_id(parent_id)
850
self.tt.adjust_path(names[self.winner_idx[name_winner]],
851
parent_trans_id, trans_id)
853
def merge_contents(self, file_id):
"""Perform a merge on file_id contents."""
855
def contents_pair(tree):
856
if file_id not in tree:
858
kind = tree.kind(file_id)
860
contents = tree.get_file_sha1(file_id)
861
elif kind == "symlink":
862
contents = tree.get_symlink_target(file_id)
865
return kind, contents
867
def contents_conflict():
868
trans_id = self.tt.trans_id_file_id(file_id)
869
name = self.tt.final_name(trans_id)
870
parent_id = self.tt.final_parent(trans_id)
871
if file_id in self.this_tree.inventory:
872
self.tt.unversion_file(trans_id)
873
if file_id in self.this_tree:
874
self.tt.delete_contents(trans_id)
875
file_group = self._dump_conflicts(name, parent_id, file_id,
877
self._raw_conflicts.append(('contents conflict', file_group))
879
# See SPOT run. run, SPOT, run.
880
# So we're not QUITE repeating ourselves; we do tricky things with
882
base_pair = contents_pair(self.base_tree)
883
other_pair = contents_pair(self.other_tree)
884
if base_pair == other_pair:
885
# OTHER introduced no changes
887
this_pair = contents_pair(self.this_tree)
888
if this_pair == other_pair:
889
# THIS and OTHER introduced the same changes
892
trans_id = self.tt.trans_id_file_id(file_id)
893
if this_pair == base_pair:
894
# only OTHER introduced changes
895
if file_id in self.this_tree:
896
# Remove any existing contents
897
self.tt.delete_contents(trans_id)
898
if file_id in self.other_tree:
899
# OTHER changed the file
900
create_by_entry(self.tt,
901
self.other_tree.inventory[file_id],
902
self.other_tree, trans_id)
903
if file_id not in self.this_tree.inventory:
904
self.tt.version_file(file_id, trans_id)
906
elif file_id in self.this_tree.inventory:
907
# OTHER deleted the file
908
self.tt.unversion_file(trans_id)
910
#BOTH THIS and OTHER introduced changes; scalar conflict
911
elif this_pair[0] == "file" and other_pair[0] == "file":
912
# THIS and OTHER are both files, so text merge. Either
913
# BASE is a file, or both converted to files, so at least we
914
# have agreement that output should be a file.
916
self.text_merge(file_id, trans_id)
918
return contents_conflict()
919
if file_id not in self.this_tree.inventory:
920
self.tt.version_file(file_id, trans_id)
922
self.tt.tree_kind(trans_id)
923
self.tt.delete_contents(trans_id)
928
# Scalar conflict, can't text merge. Dump conflicts
929
return contents_conflict()
931
def get_lines(self, tree, file_id):
932
"""Return the lines in a file, or an empty list."""
934
return tree.get_file(file_id).readlines()
938
def text_merge(self, file_id, trans_id):
939
"""Perform a three-way text merge on a file_id"""
940
# it's possible that we got here with base as a different type.
941
# if so, we just want two-way text conflicts.
942
if file_id in self.base_tree and \
943
self.base_tree.kind(file_id) == "file":
944
base_lines = self.get_lines(self.base_tree, file_id)
947
other_lines = self.get_lines(self.other_tree, file_id)
948
this_lines = self.get_lines(self.this_tree, file_id)
949
m3 = Merge3(base_lines, this_lines, other_lines,
950
is_cherrypick=self.cherrypick)
951
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
952
if self.show_base is True:
953
base_marker = '|' * 7
957
def iter_merge3(retval):
958
retval["text_conflicts"] = False
959
for line in m3.merge_lines(name_a = "TREE",
960
name_b = "MERGE-SOURCE",
961
name_base = "BASE-REVISION",
962
start_marker=start_marker,
963
base_marker=base_marker,
964
reprocess=self.reprocess):
965
if line.startswith(start_marker):
966
retval["text_conflicts"] = True
967
yield line.replace(start_marker, '<' * 7)
971
merge3_iterator = iter_merge3(retval)
972
self.tt.create_file(merge3_iterator, trans_id)
973
if retval["text_conflicts"] is True:
974
self._raw_conflicts.append(('text conflict', trans_id))
975
name = self.tt.final_name(trans_id)
976
parent_id = self.tt.final_parent(trans_id)
977
file_group = self._dump_conflicts(name, parent_id, file_id,
978
this_lines, base_lines,
980
file_group.append(trans_id)
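# For reference, a conflicted region produced by this merger looks roughly
# like the following in the working file (the BASE-REVISION section appears
# only when show_base is enabled):
#
#   <<<<<<< TREE
#   the THIS version of the line
#   ||||||| BASE-REVISION
#   the BASE version of the line
#   =======
#   the OTHER version of the line
#   >>>>>>> MERGE-SOURCE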
982
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
983
base_lines=None, other_lines=None, set_version=False,
985
"""Emit conflict files.
986
If this_lines, base_lines, or other_lines are omitted, they will be
987
determined automatically. If set_version is true, the .OTHER, .THIS
988
or .BASE (in that order) will be created as versioned files.
990
data = [('OTHER', self.other_tree, other_lines),
991
('THIS', self.this_tree, this_lines)]
993
data.append(('BASE', self.base_tree, base_lines))
996
for suffix, tree, lines in data:
998
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1000
file_group.append(trans_id)
1001
if set_version and not versioned:
1002
self.tt.version_file(file_id, trans_id)
1006
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1008
"""Emit a single conflict file."""
1009
name = name + '.' + suffix
1010
trans_id = self.tt.create_path(name, parent_id)
1011
entry = tree.inventory[file_id]
1012
create_by_entry(self.tt, entry, tree, trans_id, lines)
1015
def merge_executable(self, file_id, file_status):
1016
"""Perform a merge on the execute bit."""
1017
executable = [self.executable(t, file_id) for t in (self.base_tree,
1018
self.other_tree, self.this_tree)]
1019
self._merge_executable(file_id, executable, file_status)
1021
def _merge_executable(self, file_id, executable, file_status):
1022
"""Perform a merge on the execute bit."""
1023
base_executable, other_executable, this_executable = executable
1024
if file_status == "deleted":
1026
winner = self._three_way(*executable)
1027
if winner == "conflict":
1028
# There must be a None in here, if we have a conflict, but we
1029
# need executability since file status was not deleted.
1030
if self.executable(self.other_tree, file_id) is None:
1034
if winner == 'this' and file_status != "modified":
1036
trans_id = self.tt.trans_id_file_id(file_id)
1038
if self.tt.final_kind(trans_id) != "file":
1042
if winner == "this":
1043
executability = this_executable
1045
if file_id in self.other_tree:
1046
executability = other_executable
1047
elif file_id in self.this_tree:
1048
executability = this_executable
1049
elif file_id in self.base_tree:
1050
executability = base_executable
1051
if executability is not None:
1052
trans_id = self.tt.trans_id_file_id(file_id)
1053
self.tt.set_executability(executability, trans_id)
1055
def cook_conflicts(self, fs_conflicts):
1056
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1057
from bzrlib.conflicts import Conflict
1059
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
1060
fp = FinalPaths(self.tt)
1061
for conflict in self._raw_conflicts:
1062
conflict_type = conflict[0]
1063
if conflict_type in ('name conflict', 'parent conflict'):
1064
trans_id = conflict[1]
1065
conflict_args = conflict[2:]
1066
if trans_id not in name_conflicts:
1067
name_conflicts[trans_id] = {}
1068
unique_add(name_conflicts[trans_id], conflict_type,
1070
if conflict_type == 'contents conflict':
1071
for trans_id in conflict[1]:
1072
file_id = self.tt.final_file_id(trans_id)
1073
if file_id is not None:
1075
path = fp.get_path(trans_id)
1076
for suffix in ('.BASE', '.THIS', '.OTHER'):
1077
if path.endswith(suffix):
1078
path = path[:-len(suffix)]
1080
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1081
self.cooked_conflicts.append(c)
1082
if conflict_type == 'text conflict':
1083
trans_id = conflict[1]
1084
path = fp.get_path(trans_id)
1085
file_id = self.tt.final_file_id(trans_id)
1086
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1087
self.cooked_conflicts.append(c)
1089
for trans_id, conflicts in name_conflicts.iteritems():
1091
this_parent, other_parent = conflicts['parent conflict']
1092
if this_parent == other_parent:
1093
raise AssertionError()
1095
this_parent = other_parent = \
1096
self.tt.final_file_id(self.tt.final_parent(trans_id))
1098
this_name, other_name = conflicts['name conflict']
1099
if this_name == other_name:
1100
raise AssertionError()
1102
this_name = other_name = self.tt.final_name(trans_id)
1103
other_path = fp.get_path(trans_id)
1104
if this_parent is not None and this_name is not None:
1105
this_parent_path = \
1106
fp.get_path(self.tt.trans_id_file_id(this_parent))
1107
this_path = pathjoin(this_parent_path, this_name)
1109
this_path = "<deleted>"
1110
file_id = self.tt.final_file_id(trans_id)
1111
c = Conflict.factory('path conflict', path=this_path,
1112
conflict_path=other_path, file_id=file_id)
1113
self.cooked_conflicts.append(c)
1114
self.cooked_conflicts.sort(key=Conflict.sort_key)
1117
class WeaveMerger(Merge3Merger):
1118
"""Three-way tree merger, text weave merger."""
1119
supports_reprocess = True
1120
supports_show_base = False
1121
supports_reverse_cherrypick = False
1122
history_based = True
1124
def _merged_lines(self, file_id):
1125
"""Generate the merged lines.
1126
There is no distinction between lines that are meant to contain <<<<<<<
1130
base = self.base_tree
1133
plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
1135
if 'merge' in debug.debug_flags:
1137
trans_id = self.tt.trans_id_file_id(file_id)
1138
name = self.tt.final_name(trans_id) + '.plan'
1139
contents = ('%10s|%s' % l for l in plan)
1140
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1141
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1142
'>>>>>>> MERGE-SOURCE\n')
1143
return textmerge.merge_lines(self.reprocess)
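# When the 'merge' debug flag is set (bzr -Dmerge), the plan computed above
# is also written next to the merged file as '<name>.plan'; each line is the
# '%10s|%s' rendering of a (state, text) pair, e.g. (illustrative):
#
#        new-a|a line introduced only on the TREE side
#     killed-b|a line the MERGE-SOURCE side deleted
#    unchanged|a line common to both sides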
1145
def text_merge(self, file_id, trans_id):
1146
"""Perform a (weave) text merge for a given file and file-id.
1147
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1148
and a conflict will be noted.
1150
lines, conflicts = self._merged_lines(file_id)
1152
# Note we're checking whether the OUTPUT is binary in this case,
1153
# because we don't want to get into weave merge guts.
1154
check_text_lines(lines)
1155
self.tt.create_file(lines, trans_id)
1157
self._raw_conflicts.append(('text conflict', trans_id))
1158
name = self.tt.final_name(trans_id)
1159
parent_id = self.tt.final_parent(trans_id)
1160
file_group = self._dump_conflicts(name, parent_id, file_id,
1162
file_group.append(trans_id)
1165
class LCAMerger(WeaveMerger):
1167
def _merged_lines(self, file_id):
1168
"""Generate the merged lines.
1169
There is no distinction between lines that are meant to contain <<<<<<<
1173
base = self.base_tree
1176
plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
1178
if 'merge' in debug.debug_flags:
1180
trans_id = self.tt.trans_id_file_id(file_id)
1181
name = self.tt.final_name(trans_id) + '.plan'
1182
contents = ('%10s|%s' % l for l in plan)
1183
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1184
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1185
'>>>>>>> MERGE-SOURCE\n')
1186
return textmerge.merge_lines(self.reprocess)
1189
class Diff3Merger(Merge3Merger):
1190
"""Three-way merger using external diff3 for text merging"""
1192
def dump_file(self, temp_dir, name, tree, file_id):
1193
out_path = pathjoin(temp_dir, name)
1194
out_file = open(out_path, "wb")
1196
in_file = tree.get_file(file_id)
1197
for line in in_file:
1198
out_file.write(line)
1203
def text_merge(self, file_id, trans_id):
1204
"""Perform a diff3 merge using a specified file-id and trans-id.
1205
If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
1206
will be dumped, and a conflict will be noted.
1209
temp_dir = osutils.mkdtemp(prefix="bzr-")
1211
new_file = pathjoin(temp_dir, "new")
1212
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
1213
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
1214
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
1215
status = bzrlib.patch.diff3(new_file, this, base, other)
1216
if status not in (0, 1):
1217
raise BzrError("Unhandled diff3 exit code")
1218
f = open(new_file, 'rb')
1220
self.tt.create_file(f, trans_id)
1224
name = self.tt.final_name(trans_id)
1225
parent_id = self.tt.final_parent(trans_id)
1226
self._dump_conflicts(name, parent_id, file_id)
1227
self._raw_conflicts.append(('text conflict', trans_id))
1229
osutils.rmtree(temp_dir)
1232
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
1234
merge_type=Merge3Merger,
1235
interesting_ids=None,
1239
interesting_files=None,
1242
change_reporter=None):
1243
"""Primary interface for merging.
1245
typical use is probably
1246
'merge_inner(branch, branch.get_revision_tree(other_revision),
1247
branch.get_revision_tree(base_revision))'
1249
if this_tree is None:
1250
raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
1251
"parameter as of bzrlib version 0.8.")
1252
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1253
pb=pb, change_reporter=change_reporter)
1254
merger.backup_files = backup_files
1255
merger.merge_type = merge_type
1256
merger.interesting_ids = interesting_ids
1257
merger.ignore_zero = ignore_zero
1258
if interesting_files:
1260
raise ValueError('Only supply interesting_ids'
1261
' or interesting_files')
1262
merger.interesting_files = interesting_files
1263
merger.show_base = show_base
1264
merger.reprocess = reprocess
1265
merger.other_rev_id = other_rev_id
1266
merger.other_basis = other_rev_id
1267
get_revision_id = getattr(base_tree, 'get_revision_id', None)
1268
if get_revision_id is None:
1269
get_revision_id = base_tree.last_revision
1270
merger.set_base_revision(get_revision_id(), this_branch)
1271
return merger.do_merge()
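# A minimal sketch of the typical call (hypothetical revision ids; assumes
# the OTHER and BASE revisions are already present in the working tree's
# repository):
#
#   from bzrlib import workingtree
#   wt = workingtree.WorkingTree.open_containing('.')[0]
#   repo = wt.branch.repository
#   conflicts = merge_inner(wt.branch,
#                           repo.revision_tree(other_rev_id),
#                           repo.revision_tree(base_rev_id),
#                           this_tree=wt)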
1273
def get_merge_type_registry():
1274
"""Merge type registry is in bzrlib.option to avoid circular imports.
1276
This method provides a sanctioned way to retrieve it.
1278
from bzrlib import option
1279
return option._merge_type_registry
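# Illustrative use (registry key names such as 'merge3', 'weave', 'diff3' and
# 'lca' are those registered by bzrlib.option and may vary between releases):
#
#   registry = get_merge_type_registry()
#   merger.merge_type = registry.get('merge3')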
1282
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1283
def status_a(revision, text):
1284
if revision in ancestors_b:
1285
return 'killed-b', text
1287
return 'new-a', text
1289
def status_b(revision, text):
1290
if revision in ancestors_a:
1291
return 'killed-a', text
1293
return 'new-b', text
1295
plain_a = [t for (a, t) in annotated_a]
1296
plain_b = [t for (a, t) in annotated_b]
1297
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1298
blocks = matcher.get_matching_blocks()
1301
for ai, bi, l in blocks:
1302
# process all mismatched sections
1303
# (last mismatched section is handled because blocks always
1304
# includes a 0-length last block)
1305
for revision, text in annotated_a[a_cur:ai]:
1306
yield status_a(revision, text)
1307
for revision, text in annotated_b[b_cur:bi]:
1308
yield status_b(revision, text)
1309
# and now the matched section
1312
for text_a in plain_a[ai:a_cur]:
1313
yield "unchanged", text_a
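# The generated plan is a stream of (state, text) pairs; roughly:
#   ('unchanged', ...)  line matched between the two annotated texts
#   ('new-a', ...)      line only in A, from a revision outside B's ancestry
#   ('killed-b', ...)   line only in A, but from a revision B has merged,
#                       so B must have removed it (and symmetrically for
#                       'new-b' / 'killed-a')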
1316
class _PlanMergeBase(object):
1318
def __init__(self, a_rev, b_rev, vf, key_prefix):
1321
:param a_rev: Revision-id of one revision to merge
1322
:param b_rev: Revision-id of the other revision to merge
1323
:param vf: A VersionedFiles containing both revisions
1324
:param key_prefix: A prefix for accessing keys in vf, typically
1330
self._last_lines = None
1331
self._last_lines_revision_id = None
1332
self._cached_matching_blocks = {}
1333
self._key_prefix = key_prefix
1334
self._precache_tip_lines()
1336
def _precache_tip_lines(self):
1337
lines = self.get_lines([self.a_rev, self.b_rev])
1338
self.lines_a = lines[self.a_rev]
1339
self.lines_b = lines[self.b_rev]
1341
def get_lines(self, revisions):
1342
"""Get lines for revisions from the backing VersionedFiles.
1344
:raises RevisionNotPresent: on absent texts.
1346
keys = [(self._key_prefix + (rev,)) for rev in revisions]
1348
for record in self.vf.get_record_stream(keys, 'unordered', True):
1349
if record.storage_kind == 'absent':
1350
raise errors.RevisionNotPresent(record.key, self.vf)
1351
result[record.key[-1]] = osutils.split_lines(
1352
record.get_bytes_as('fulltext'))
1355
def plan_merge(self):
1356
"""Generate a 'plan' for merging the two revisions.
1358
This involves comparing their texts and determining the cause of
1359
differences. If text A has a line and text B does not, then either the
1360
line was added to text A, or it was deleted from B. Once the causes
1361
are combined, they are written out in the format described in
1362
VersionedFile.plan_merge
1364
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
1365
unique_a, unique_b = self._unique_lines(blocks)
1366
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
1367
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
1368
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
1370
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
1373
for i, j, n in blocks:
1374
for a_index in range(last_i, i):
1375
if a_index in new_a:
1376
if a_index in killed_b:
1377
yield 'conflicted-a', self.lines_a[a_index]
1379
yield 'new-a', self.lines_a[a_index]
1381
yield 'killed-b', self.lines_a[a_index]
1382
for b_index in range(last_j, j):
1383
if b_index in new_b:
1384
if b_index in killed_a:
1385
yield 'conflicted-b', self.lines_b[b_index]
1387
yield 'new-b', self.lines_b[b_index]
1389
yield 'killed-a', self.lines_b[b_index]
1390
# handle common lines
1391
for a_index in range(i, i+n):
1392
yield 'unchanged', self.lines_a[a_index]
1396
def _get_matching_blocks(self, left_revision, right_revision):
1397
"""Return a description of which sections of two revisions match.
1399
See SequenceMatcher.get_matching_blocks
1401
cached = self._cached_matching_blocks.get((left_revision,
1403
if cached is not None:
1405
if self._last_lines_revision_id == left_revision:
1406
left_lines = self._last_lines
1407
right_lines = self.get_lines([right_revision])[right_revision]
1409
lines = self.get_lines([left_revision, right_revision])
1410
left_lines = lines[left_revision]
1411
right_lines = lines[right_revision]
1412
self._last_lines = right_lines
1413
self._last_lines_revision_id = right_revision
1414
matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
1416
return matcher.get_matching_blocks()
1418
def _unique_lines(self, matching_blocks):
1419
"""Analyse matching_blocks to determine which lines are unique
1421
:return: a tuple of (unique_left, unique_right), where the values are
1422
sets of line numbers of unique lines.
1428
for i, j, n in matching_blocks:
1429
unique_left.extend(range(last_i, i))
1430
unique_right.extend(range(last_j, j))
1433
return unique_left, unique_right
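# Worked example (illustrative): for texts a = [l0, l1, l2, l3] and
# b = [l0, l1, l3], a PatienceSequenceMatcher yields matching_blocks
# [(0, 0, 2), (3, 2, 1), (4, 3, 0)], and the loop above collects the gaps
# between blocks, giving unique_left = [2] and unique_right = [].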
1436
def _subtract_plans(old_plan, new_plan):
1437
"""Remove changes from new_plan that came from old_plan.
1439
It is assumed that the difference between the old_plan and new_plan
1440
is their choice of 'b' text.
1442
All lines from new_plan that differ from old_plan are emitted
1443
verbatim. All lines from new_plan that match old_plan but are
1444
not about the 'b' revision are emitted verbatim.
1446
Lines that match and are about the 'b' revision are the lines we
1447
don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
1448
is skipped entirely.
1450
matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
1453
for i, j, n in matcher.get_matching_blocks():
1454
for jj in range(last_j, j):
1456
for jj in range(j, j+n):
1457
plan_line = new_plan[jj]
1458
if plan_line[0] == 'new-b':
1460
elif plan_line[0] == 'killed-b':
1461
yield 'unchanged', plan_line[1]
1467
class _PlanMerge(_PlanMergeBase):
1468
"""Plan an annotate merge using on-the-fly annotation"""
1470
def __init__(self, a_rev, b_rev, vf, key_prefix):
1471
super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
1472
self.a_key = self._key_prefix + (self.a_rev,)
1473
self.b_key = self._key_prefix + (self.b_rev,)
1474
self.graph = Graph(self.vf)
1475
heads = self.graph.heads((self.a_key, self.b_key))
1477
# one side dominates, so we can just return its values, yay for
1479
# Ideally we would know that before we get this far
1480
self._head_key = heads.pop()
1481
if self._head_key == self.a_key:
1485
mutter('found dominating revision for %s\n%s > %s', self.vf,
1486
self._head_key[-1], other)
1489
self._head_key = None
1492
def _precache_tip_lines(self):
1493
# Turn this into a no-op, because we will do this later
1496
def _find_recursive_lcas(self):
1497
"""Find all the ancestors back to a unique lca"""
1498
cur_ancestors = (self.a_key, self.b_key)
1499
# graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
1500
# rather than a key tuple. We will just map that directly to no common
1504
next_lcas = self.graph.find_lca(*cur_ancestors)
1505
# Map a plain NULL_REVISION to a simple no-ancestors
1506
if next_lcas == set([NULL_REVISION]):
1508
# Order the lca's based on when they were merged into the tip
1509
# While the actual merge portion of weave merge uses a set() of
1510
# active revisions, the order of insertion *does* affect the
1511
# implicit ordering of the texts.
1512
for rev_key in cur_ancestors:
1513
ordered_parents = tuple(self.graph.find_merge_order(rev_key,
1515
parent_map[rev_key] = ordered_parents
1516
if len(next_lcas) == 0:
1518
elif len(next_lcas) == 1:
1519
parent_map[list(next_lcas)[0]] = ()
1521
elif len(next_lcas) > 2:
1522
# More than 2 lca's, fall back to grabbing all nodes between
1523
# this and the unique lca.
1524
mutter('More than 2 LCAs, falling back to all nodes for:'
1525
' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
1526
cur_lcas = next_lcas
1527
while len(cur_lcas) > 1:
1528
cur_lcas = self.graph.find_lca(*cur_lcas)
1529
if len(cur_lcas) == 0:
1530
# No common base to find, use the full ancestry
1533
unique_lca = list(cur_lcas)[0]
1534
if unique_lca == NULL_REVISION:
1535
# find_lca will return a plain 'NULL_REVISION' rather
1536
# than a key tuple when there is no common ancestor, we
1537
# prefer to just use None, because it doesn't confuse
1538
# _get_interesting_texts()
1540
parent_map.update(self._find_unique_parents(next_lcas,
1543
cur_ancestors = next_lcas
1546
def _find_unique_parents(self, tip_keys, base_key):
1547
"""Find ancestors of tip that aren't ancestors of base.
1549
:param tip_keys: Nodes that are interesting
1550
:param base_key: Cull all ancestors of this node
1551
:return: The parent map for all revisions between tip_keys and
1552
base_key. base_key will be included. References to nodes outside of
1553
the ancestor set will also be removed.
1555
# TODO: this would be simpler if find_unique_ancestors took a list
1556
# instead of a single tip, internally it supports it, but it
1557
# isn't a "backwards compatible" api change.
1558
if base_key is None:
1559
parent_map = dict(self.graph.iter_ancestry(tip_keys))
1560
# We remove NULL_REVISION because it isn't a proper tuple key, and
1561
# thus confuses things like _get_interesting_texts, and our logic
1562
# to add the texts into the memory weave.
1563
if NULL_REVISION in parent_map:
1564
parent_map.pop(NULL_REVISION)
1567
for tip in tip_keys:
1569
self.graph.find_unique_ancestors(tip, [base_key]))
1570
parent_map = self.graph.get_parent_map(interesting)
1571
parent_map[base_key] = ()
1572
culled_parent_map, child_map, tails = self._remove_external_references(
1574
# Remove all the tails but base_key
1575
if base_key is not None:
1576
tails.remove(base_key)
1577
self._prune_tails(culled_parent_map, child_map, tails)
1578
# Now remove all the uninteresting 'linear' regions
1579
simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
1583
def _remove_external_references(parent_map):
1584
"""Remove references that go outside of the parent map.
1586
:param parent_map: Something returned from Graph.get_parent_map(keys)
1587
:return: (filtered_parent_map, child_map, tails)
1588
filtered_parent_map is parent_map without external references
1589
child_map is the {parent_key: [child_keys]} mapping
1590
tails is a list of nodes that do not have any parents in the map
1592
# TODO: The basic effect of this function seems more generic than
1593
# _PlanMerge. But the specific details of building a child_map,
1594
# and computing tails seems very specific to _PlanMerge.
1595
# Still, should this be in Graph land?
1596
filtered_parent_map = {}
1599
for key, parent_keys in parent_map.iteritems():
1600
culled_parent_keys = [p for p in parent_keys if p in parent_map]
1601
if not culled_parent_keys:
1603
for parent_key in culled_parent_keys:
1604
child_map.setdefault(parent_key, []).append(key)
1605
# TODO: Do we want to do this, it adds overhead for every node,
1606
# just to say that the node has no children
1607
child_map.setdefault(key, [])
1608
filtered_parent_map[key] = culled_parent_keys
1609
return filtered_parent_map, child_map, tails
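# Worked example (illustrative keys): with
#   parent_map = {('A',): (('ghost',),), ('B',): (('A',),)}
# the ghost parent is culled, so the result is
#   filtered_parent_map = {('A',): [], ('B',): [('A',)]}
#   child_map           = {('A',): [('B',)], ('B',): []}
#   tails               = [('A',)]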
1612
def _prune_tails(parent_map, child_map, tails_to_remove):
1613
"""Remove tails from the parent map.
1615
This will remove the supplied revisions until no more children have 0
1618
:param parent_map: A dict of {child: [parents]}, this dictionary will
1619
be modified in place.
1620
:param tails_to_remove: A list of tips that should be removed,
1621
this list will be consumed
1622
:param child_map: The reverse dict of parent_map ({parent: [children]})
1623
this dict will be modified
1624
:return: None, parent_map will be modified in place.
1626
while tails_to_remove:
1627
next = tails_to_remove.pop()
1628
parent_map.pop(next)
1629
children = child_map.pop(next)
1630
for child in children:
1631
child_parents = parent_map[child]
1632
child_parents.remove(next)
1633
if len(child_parents) == 0:
1634
tails_to_remove.append(child)
1636
def _get_interesting_texts(self, parent_map):
1637
"""Return a dict of texts we are interested in.
1639
Note that the input is in key tuples, but the output is in plain
1642
:param parent_map: The output from _find_recursive_lcas
1643
:return: A dict of {'revision_id':lines} as returned by
1644
_PlanMergeBase.get_lines()
1646
all_revision_keys = set(parent_map)
1647
all_revision_keys.add(self.a_key)
1648
all_revision_keys.add(self.b_key)
1650
# Everything else is in 'keys' but get_lines is in 'revision_ids'
1651
all_texts = self.get_lines([k[-1] for k in all_revision_keys])
1654
def _build_weave(self):
1655
from bzrlib import weave
1656
self._weave = weave.Weave(weave_name='in_memory_weave',
1657
allow_reserved=True)
1658
parent_map = self._find_recursive_lcas()
1660
all_texts = self._get_interesting_texts(parent_map)
1662
# Note: Unfortunately, the order given by topo_sort will affect the
1663
# ordering resolution in the output. Specifically, if you add A then B,
1664
# then in the output text A lines will show up before B lines. And, of
1665
# course, topo_sort doesn't guarantee any real ordering.
1666
# So we use merge_sort, and add a fake node on the tip.
1667
# This ensures that left-hand parents will always be inserted into the
1668
# weave before right-hand parents.
1669
tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
1670
parent_map[tip_key] = (self.a_key, self.b_key)
1672
for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
1676
# for key in tsort.topo_sort(parent_map):
1677
parent_keys = parent_map[key]
1678
revision_id = key[-1]
1679
parent_ids = [k[-1] for k in parent_keys]
1680
self._weave.add_lines(revision_id, parent_ids,
1681
all_texts[revision_id])
1683
def plan_merge(self):
1684
"""Generate a 'plan' for merging the two revisions.
1686
This involves comparing their texts and determining the cause of
1687
differences. If text A has a line and text B does not, then either the
1688
line was added to text A, or it was deleted from B. Once the causes
1689
are combined, they are written out in the format described in
1690
VersionedFile.plan_merge
1692
if self._head_key is not None: # There was a single head
1693
if self._head_key == self.a_key:
1696
if self._head_key != self.b_key:
1697
raise AssertionError('There was an invalid head: %s != %s'
1698
% (self.b_key, self._head_key))
1700
head_rev = self._head_key[-1]
1701
lines = self.get_lines([head_rev])[head_rev]
1702
return ((plan, line) for line in lines)
1703
return self._weave.plan_merge(self.a_rev, self.b_rev)
1706
class _PlanLCAMerge(_PlanMergeBase):
1708
This merge algorithm differs from _PlanMerge in that:
1709
1. comparisons are done against LCAs only
1710
2. cases where a contested line is new versus one LCA but old versus
1711
another are marked as conflicts, by emitting the line as conflicted-a
1714
This is faster, and hopefully produces more useful output.
1717
def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
1718
_PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
1719
lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
1722
if lca == NULL_REVISION:
1725
self.lcas.add(lca[-1])
1726
for lca in self.lcas:
1727
if _mod_revision.is_null(lca):
1730
lca_lines = self.get_lines([lca])[lca]
1731
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
1733
blocks = list(matcher.get_matching_blocks())
1734
self._cached_matching_blocks[(a_rev, lca)] = blocks
1735
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
1737
blocks = list(matcher.get_matching_blocks())
1738
self._cached_matching_blocks[(b_rev, lca)] = blocks
1740
def _determine_status(self, revision_id, unique_line_numbers):
"""Determines the status of unique lines versus all LCAs.
1743
Basically, determines why the line is unique to this revision.
1745
A line may be determined new, killed, or both.
1747
If a line is determined new, that means it was not present in at least
1748
one LCA, and is not present in the other merge revision.
1750
If a line is determined killed, that means the line was present in
1753
If a line is killed and new, this indicates that the two merge
1754
revisions contain differing conflict resolutions.
1755
:param revision_id: The id of the revision in which the lines are
1757
:param unique_line_numbers: The line numbers of unique lines.
1758
:return: a tuple of (new_this, killed_other)
1762
unique_line_numbers = set(unique_line_numbers)
1763
for lca in self.lcas:
1764
blocks = self._get_matching_blocks(revision_id, lca)
1765
unique_vs_lca, _ignored = self._unique_lines(blocks)
1766
new.update(unique_line_numbers.intersection(unique_vs_lca))
1767
killed.update(unique_line_numbers.difference(unique_vs_lca))