# Copyright (C) 2005, 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

from itertools import chain

from bzrlib import (
    debug,
    errors,
    osutils,
    patiencediff,
    revision as _mod_revision,
    ui,
    )
from bzrlib.branch import Branch
from bzrlib.conflicts import ConflictList, Conflict
from bzrlib.errors import (BzrCommandError,
                           BzrError,
                           BinaryFile,
                           NoCommits,
                           NoSuchFile,
                           UnrelatedBranches,
                           UnsupportedOperation,
                           WorkingTreeNotRevision,
                           )
from bzrlib.graph import Graph
from bzrlib.merge3 import Merge3
from bzrlib.osutils import rename, pathjoin
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import (NULL_REVISION, ensure_null)
from bzrlib.textfile import check_text_lines
from bzrlib.trace import mutter, warning, note, is_quiet
from bzrlib.transform import (TransformPreview, TreeTransform,
                              resolve_conflicts, cook_conflicts,
                              conflict_pass, FinalPaths, create_by_entry,
                              unique_add, ROOT_PARENT)
from bzrlib.versionedfile import PlanWeaveMerge

# TODO: Report back as changes are merged in
def transform_tree(from_tree, to_tree, interesting_ids=None):
    merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
                interesting_ids=interesting_ids, this_tree=from_tree)


class Merger(object):

    def __init__(self, this_branch, other_tree=None, base_tree=None,
                 this_tree=None, pb=DummyProgress(), change_reporter=None,
                 recurse='down', revision_graph=None):
        self.this_branch = this_branch
        self.this_basis = _mod_revision.ensure_null(
            this_branch.last_revision())
        self.this_rev_id = None
        self.this_tree = this_tree
        self.this_revision_tree = None
        self.this_basis_tree = None
        self.other_tree = other_tree
        self.other_branch = None
        self.base_tree = base_tree
        self.ignore_zero = False
        self.backup_files = False
        self.interesting_ids = None
        self.interesting_files = None
        self.show_base = False
        self.reprocess = False
        self._pb = pb
        self.pp = None
        self.recurse = recurse
        self.change_reporter = change_reporter
        self._cached_trees = {}
        self._revision_graph = revision_graph
        self._base_is_ancestor = None
        self._base_is_other_ancestor = None
    @property
    def revision_graph(self):
        if self._revision_graph is None:
            self._revision_graph = self.this_branch.repository.get_graph()
        return self._revision_graph

    def _set_base_is_ancestor(self, value):
        self._base_is_ancestor = value

    def _get_base_is_ancestor(self):
        if self._base_is_ancestor is None:
            self._base_is_ancestor = self.revision_graph.is_ancestor(
                self.base_rev_id, self.this_basis)
        return self._base_is_ancestor

    base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)

    def _set_base_is_other_ancestor(self, value):
        self._base_is_other_ancestor = value

    def _get_base_is_other_ancestor(self):
        if self._base_is_other_ancestor is None:
            if self.other_basis is None:
                return True
            self._base_is_other_ancestor = self.revision_graph.is_ancestor(
                self.base_rev_id, self.other_basis)
        return self._base_is_other_ancestor

    base_is_other_ancestor = property(_get_base_is_other_ancestor,
                                      _set_base_is_other_ancestor)
    @staticmethod
    def from_uncommitted(tree, other_tree, pb):
        """Return a Merger for uncommitted changes in other_tree.

        :param tree: The tree to merge into
        :param other_tree: The tree to get uncommitted changes from
        :param pb: A progress indicator
        """
        merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
                        pb)
        merger.base_rev_id = merger.base_tree.get_revision_id()
        merger.other_rev_id = None
        merger.other_basis = merger.base_rev_id
        return merger

    @classmethod
    def from_mergeable(klass, tree, mergeable, pb):
        """Return a Merger for a bundle or merge directive.

        :param tree: The tree to merge changes into
        :param mergeable: A merge directive or bundle
        :param pb: A progress indicator
        """
        mergeable.install_revisions(tree.branch.repository)
        base_revision_id, other_revision_id, verified =\
            mergeable.get_merge_request(tree.branch.repository)
        revision_graph = tree.branch.repository.get_graph()
        if base_revision_id is not None:
            if (base_revision_id != _mod_revision.NULL_REVISION and
                revision_graph.is_ancestor(
                base_revision_id, tree.branch.last_revision())):
                base_revision_id = None
            else:
                warning('Performing cherrypick')
        merger = klass.from_revision_ids(pb, tree, other_revision_id,
                                         base_revision_id, revision_graph=
                                         revision_graph)
        return merger, verified
    @staticmethod
    def from_revision_ids(pb, tree, other, base=None, other_branch=None,
                          base_branch=None, revision_graph=None):
        """Return a Merger for revision-ids.

        :param tree: The tree to merge changes into
        :param other: The revision-id to use as OTHER
        :param base: The revision-id to use as BASE. If not specified, will
            be selected automatically.
        :param other_branch: A branch containing the other revision-id. If
            not supplied, tree.branch is used.
        :param base_branch: A branch containing the base revision-id. If
            not supplied, other_branch or tree.branch will be used.
        :param revision_graph: If you have a revision_graph precomputed, pass
            it in, otherwise it will be created for you.
        :param pb: A progress indicator
        """
        merger = Merger(tree.branch, this_tree=tree, pb=pb,
                        revision_graph=revision_graph)
        if other_branch is None:
            other_branch = tree.branch
        merger.set_other_revision(other, other_branch)
        if base is None:
            merger.find_base()
        else:
            if base_branch is None:
                base_branch = other_branch
            merger.set_base_revision(base, base_branch)
        return merger
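
    # Illustrative sketch (not part of the original module): how the
    # constructors above are typically driven from a working tree.  The
    # revision id below is a hypothetical placeholder.
    #
    #   merger = Merger.from_revision_ids(DummyProgress(), tree,
    #                                     other='other-rev-id', base=None)
    #   merger.merge_type = Merge3Merger
    #   conflict_count = merger.do_merge()
    #
    # With base=None the merge base is computed from the revision graph
    # rather than being supplied explicitly.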
    def revision_tree(self, revision_id, branch=None):
        if revision_id not in self._cached_trees:
            if branch is None:
                branch = self.this_branch
            try:
                tree = self.this_tree.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                tree = branch.repository.revision_tree(revision_id)
            self._cached_trees[revision_id] = tree
        return self._cached_trees[revision_id]

    def _get_tree(self, treespec, possible_transports=None):
        from bzrlib import workingtree
        location, revno = treespec
        if revno is None:
            tree = workingtree.WorkingTree.open_containing(location)[0]
            return tree.branch, tree
        branch = Branch.open_containing(location, possible_transports)[0]
        if revno == -1:
            revision_id = branch.last_revision()
        else:
            revision_id = branch.get_rev_id(revno)
        revision_id = ensure_null(revision_id)
        return branch, self.revision_tree(revision_id, branch)

    def ensure_revision_trees(self):
        if self.this_revision_tree is None:
            self.this_basis_tree = self.revision_tree(self.this_basis)
            if self.this_basis == self.this_rev_id:
                self.this_revision_tree = self.this_basis_tree

        if self.other_rev_id is None:
            other_basis_tree = self.revision_tree(self.other_basis)
            changes = other_basis_tree.changes_from(self.other_tree)
            if changes.has_changed():
                raise WorkingTreeNotRevision(self.this_tree)
            other_rev_id = self.other_basis
            self.other_tree = other_basis_tree

    def file_revisions(self, file_id):
        self.ensure_revision_trees()
        def get_id(tree, file_id):
            revision_id = tree.inventory[file_id].revision
            return revision_id
        if self.this_rev_id is None:
            if self.this_basis_tree.get_file_sha1(file_id) != \
                self.this_tree.get_file_sha1(file_id):
                raise WorkingTreeNotRevision(self.this_tree)

        trees = (self.this_basis_tree, self.other_tree)
        return [get_id(tree, file_id) for tree in trees]
    def check_basis(self, check_clean, require_commits=True):
        if self.this_basis is None and require_commits is True:
            raise BzrCommandError("This branch has no commits."
                                  " (perhaps you would prefer 'bzr pull')")
        if check_clean:
            self.compare_basis()
            if self.this_basis != self.this_rev_id:
                raise errors.UncommittedChanges(self.this_tree)

    def compare_basis(self):
        try:
            basis_tree = self.revision_tree(self.this_tree.last_revision())
        except errors.NoSuchRevision:
            basis_tree = self.this_tree.basis_tree()
        changes = self.this_tree.changes_from(basis_tree)
        if not changes.has_changed():
            self.this_rev_id = self.this_basis

    def set_interesting_files(self, file_list):
        self.interesting_files = file_list

    def set_pending(self):
        if (not self.base_is_ancestor or not self.base_is_other_ancestor
                or self.other_rev_id is None):
            return
        self._add_parent()

    def _add_parent(self):
        new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
        new_parent_trees = []
        for revision_id in new_parents:
            try:
                tree = self.revision_tree(revision_id)
            except errors.NoSuchRevision:
                tree = None
            else:
                tree.lock_read()
            new_parent_trees.append((revision_id, tree))
        try:
            self.this_tree.set_parent_trees(new_parent_trees,
                                            allow_leftmost_as_ghost=True)
        finally:
            for _revision_id, tree in new_parent_trees:
                if tree is not None:
                    tree.unlock()
    def set_other(self, other_revision, possible_transports=None):
        """Set the revision and tree to merge from.

        This sets the other_tree, other_rev_id, other_basis attributes.

        :param other_revision: The [path, revision] list to merge from.
        """
        self.other_branch, self.other_tree = self._get_tree(other_revision,
                                                            possible_transports)
        if other_revision[1] == -1:
            self.other_rev_id = _mod_revision.ensure_null(
                self.other_branch.last_revision())
            if _mod_revision.is_null(self.other_rev_id):
                raise NoCommits(self.other_branch)
            self.other_basis = self.other_rev_id
        elif other_revision[1] is not None:
            self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
            self.other_basis = self.other_rev_id
        else:
            self.other_rev_id = None
            self.other_basis = self.other_branch.last_revision()
            if self.other_basis is None:
                raise NoCommits(self.other_branch)
        if self.other_rev_id is not None:
            self._cached_trees[self.other_rev_id] = self.other_tree
        self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)

    def set_other_revision(self, revision_id, other_branch):
        """Set 'other' based on a branch and revision id

        :param revision_id: The revision to use for a tree
        :param other_branch: The branch containing this tree
        """
        self.other_rev_id = revision_id
        self.other_branch = other_branch
        self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
        self.other_tree = self.revision_tree(revision_id)
        self.other_basis = revision_id

    def set_base_revision(self, revision_id, branch):
        """Set 'base' based on a branch and revision id

        :param revision_id: The revision to use for a tree
        :param branch: The branch containing this tree
        """
        self.base_rev_id = revision_id
        self.base_branch = branch
        self._maybe_fetch(branch, self.this_branch, revision_id)
        self.base_tree = self.revision_tree(revision_id)

    def _maybe_fetch(self, source, target, revision_id):
        if not source.repository.has_same_location(target.repository):
            target.fetch(source, revision_id)
    def find_base(self):
        revisions = [ensure_null(self.this_basis),
                     ensure_null(self.other_basis)]
        if NULL_REVISION in revisions:
            self.base_rev_id = NULL_REVISION
        else:
            self.base_rev_id, steps = self.revision_graph.find_unique_lca(
                revisions[0], revisions[1], count_steps=True)
            if self.base_rev_id == NULL_REVISION:
                raise UnrelatedBranches()
            if steps > 1:
                warning('Warning: criss-cross merge encountered. See bzr'
                        ' help criss-cross.')
        self.base_tree = self.revision_tree(self.base_rev_id)
        self.base_is_ancestor = True
        self.base_is_other_ancestor = True

    def set_base(self, base_revision):
        """Set the base revision to use for the merge.

        :param base_revision: A 2-list containing a path and revision number.
        """
        mutter("doing merge() with no base_revision specified")
        if base_revision == [None, None]:
            self.find_base()
        else:
            base_branch, self.base_tree = self._get_tree(base_revision)
            if base_revision[1] == -1:
                self.base_rev_id = base_branch.last_revision()
            elif base_revision[1] is None:
                self.base_rev_id = _mod_revision.NULL_REVISION
            else:
                self.base_rev_id = _mod_revision.ensure_null(
                    base_branch.get_rev_id(base_revision[1]))
            self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
    def make_merger(self):
        kwargs = {'working_tree': self.this_tree, 'this_tree': self.this_tree,
                  'other_tree': self.other_tree,
                  'interesting_ids': self.interesting_ids,
                  'interesting_files': self.interesting_files,
                  'pp': self.pp,
                  'do_merge': False}
        if self.merge_type.requires_base:
            kwargs['base_tree'] = self.base_tree
        if self.merge_type.supports_reprocess:
            kwargs['reprocess'] = self.reprocess
        elif self.reprocess:
            raise BzrError("Conflict reduction is not supported for merge"
                           " type %s." % self.merge_type)
        if self.merge_type.supports_show_base:
            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise BzrError("Showing base is not supported for this"
                           " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.supports_cherrypick:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                                    not self.base_is_other_ancestor)
        return self.merge_type(pb=self._pb,
                               change_reporter=self.change_reporter,
                               **kwargs)
    def do_merge(self):
        self.this_tree.lock_tree_write()
        if self.base_tree is not None:
            self.base_tree.lock_read()
        if self.other_tree is not None:
            self.other_tree.lock_read()
        try:
            merge = self.make_merger()
            merge.do_merge()
            if self.recurse == 'down':
                for relpath, file_id in self.this_tree.iter_references():
                    sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
                    other_revision = self.other_tree.get_reference_revision(
                        file_id, relpath)
                    if other_revision == sub_tree.last_revision():
                        continue
                    sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                    sub_merge.merge_type = self.merge_type
                    other_branch = self.other_branch.reference_parent(file_id, relpath)
                    sub_merge.set_other_revision(other_revision, other_branch)
                    base_revision = self.base_tree.get_reference_revision(file_id)
                    sub_merge.base_tree = \
                        sub_tree.branch.repository.revision_tree(base_revision)
                    sub_merge.base_rev_id = base_revision
                    sub_merge.do_merge()
        finally:
            if self.other_tree is not None:
                self.other_tree.unlock()
            if self.base_tree is not None:
                self.base_tree.unlock()
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
        else:
            note("%d conflicts encountered." % len(merge.cooked_conflicts))

        return len(merge.cooked_conflicts)
class Merge3Merger(object):
    """Three-way merger that uses the merge3 text merger"""
    requires_base = True
    supports_reprocess = True
    supports_show_base = True
    history_based = False
    supports_cherrypick = True
    supports_reverse_cherrypick = True
    winner_idx = {"this": 2, "other": 1, "conflict": 1}

    def __init__(self, working_tree, this_tree, base_tree, other_tree,
                 interesting_ids=None, reprocess=False, show_base=False,
                 pb=DummyProgress(), pp=None, change_reporter=None,
                 interesting_files=None, do_merge=True,
                 cherrypick=False):
        """Initialize the merger object and perform the merge.

        :param working_tree: The working tree to apply the merge to
        :param this_tree: The local tree in the merge operation
        :param base_tree: The common tree in the merge operation
        :param other_tree: The other tree to merge changes from
        :param interesting_ids: The file_ids of files that should
            participate in the merge. May not be combined with
            interesting_files.
        :param reprocess: If True, perform conflict-reduction processing.
        :param show_base: If True, show the base revision in text conflicts.
            (incompatible with reprocess)
        :param pb: A Progress bar
        :param pp: A ProgressPhase object
        :param change_reporter: An object that should report changes made
        :param interesting_files: The tree-relative paths of files that should
            participate in the merge. If these paths refer to directories,
            the contents of those directories will also be included. May not
            be combined with interesting_ids. If neither interesting_files nor
            interesting_ids is specified, all files may participate in the
            merge.
        """
        object.__init__(self)
        if interesting_files is not None and interesting_ids is not None:
            raise ValueError(
                'specify either interesting_ids or interesting_files')
        self.interesting_ids = interesting_ids
        self.interesting_files = interesting_files
        self.this_tree = working_tree
        self.base_tree = base_tree
        self.other_tree = other_tree
        self._raw_conflicts = []
        self.cooked_conflicts = []
        self.reprocess = reprocess
        self.show_base = show_base
        self.pb = pb
        self.pp = pp
        self.change_reporter = change_reporter
        self.cherrypick = cherrypick
        if self.pp is None:
            self.pp = ProgressPhase("Merge phase", 3, self.pb)
        if do_merge:
            self.do_merge()
    def do_merge(self):
        self.this_tree.lock_tree_write()
        self.base_tree.lock_read()
        self.other_tree.lock_read()
        self.tt = TreeTransform(self.this_tree, self.pb)
        try:
            self._compute_transform()
            results = self.tt.apply(no_conflicts=True)
            self.write_modified(results)
            try:
                self.this_tree.add_conflicts(self.cooked_conflicts)
            except UnsupportedOperation:
                pass
        finally:
            self.tt.finalize()
            self.other_tree.unlock()
            self.base_tree.unlock()
            self.this_tree.unlock()

    def make_preview_transform(self):
        self.base_tree.lock_read()
        self.other_tree.lock_read()
        self.tt = TransformPreview(self.this_tree)
        try:
            self._compute_transform()
        finally:
            self.other_tree.unlock()
            self.base_tree.unlock()
        return self.tt
    def _compute_transform(self):
        entries = self._entries3()
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (file_id, changed, parents3, names3,
                      executable3) in enumerate(entries):
                child_pb.update('Preparing file merge', num, len(entries))
                self._merge_names(file_id, parents3, names3)
                if changed:
                    file_status = self.merge_contents(file_id)
                else:
                    file_status = 'unmodified'
                self._merge_executable(file_id,
                    executable3, file_status)
        finally:
            child_pb.finished()
        self.fix_root()
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            fs_conflicts = resolve_conflicts(self.tt, child_pb,
                lambda t, c: conflict_pass(t, c, self.other_tree))
        finally:
            child_pb.finished()
        if self.change_reporter is not None:
            from bzrlib import delta
            delta.report_changes(
                self.tt.iter_changes(), self.change_reporter)
        self.cook_conflicts(fs_conflicts)
        for conflict in self.cooked_conflicts:
            warning(conflict)

    def _entries3(self):
        """Gather data about files modified between three trees.

        Return a list of tuples of file_id, changed, parents3, names3,
        executable3. changed is a boolean indicating whether the file contents
        or kind were changed. parents3 is a tuple of parent ids for base,
        other and this. names3 is a tuple of names for base, other and this.
        executable3 is a tuple of execute-bit values for base, other and this.
        """
        result = []
        iterator = self.other_tree.iter_changes(self.base_tree,
                include_unchanged=True, specific_files=self.interesting_files,
                extra_trees=[self.this_tree])
        for (file_id, paths, changed, versioned, parents, names, kind,
             executable) in iterator:
            if (self.interesting_ids is not None and
                file_id not in self.interesting_ids):
                continue
            if file_id in self.this_tree.inventory:
                entry = self.this_tree.inventory[file_id]
                this_name = entry.name
                this_parent = entry.parent_id
                this_executable = entry.executable
            else:
                this_name = None
                this_parent = None
                this_executable = None
            parents3 = parents + (this_parent,)
            names3 = names + (this_name,)
            executable3 = executable + (this_executable,)
            result.append((file_id, changed, parents3, names3, executable3))
        return result
    def fix_root(self):
        try:
            self.tt.final_kind(self.tt.root)
        except NoSuchFile:
            self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        if self.other_tree.inventory.root is None:
            return
        other_root_file_id = self.other_tree.get_root_id()
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return
        try:
            self.tt.final_kind(other_root)
        except NoSuchFile:
            return
        self.reparent_children(self.other_tree.inventory.root, self.tt.root)
        self.tt.cancel_creation(other_root)
        self.tt.cancel_versioning(other_root)

    def reparent_children(self, ie, target):
        for thing, child in ie.children.iteritems():
            trans_id = self.tt.trans_id_file_id(child.file_id)
            self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)

    def write_modified(self, results):
        modified_hashes = {}
        for path in results.modified_paths:
            file_id = self.this_tree.path2id(self.this_tree.relpath(path))
            if file_id is None:
                continue
            hash = self.this_tree.get_file_sha1(file_id)
            modified_hashes[file_id] = hash
        self.this_tree.set_merge_modified(modified_hashes)
    @staticmethod
    def parent(entry, file_id):
        """Determine the parent for a file_id (used as a key method)"""
        if entry is None:
            return None
        return entry.parent_id

    @staticmethod
    def name(entry, file_id):
        """Determine the name for a file_id (used as a key method)"""
        if entry is None:
            return None
        return entry.name

    @staticmethod
    def contents_sha1(tree, file_id):
        """Determine the sha1 of the file contents (used as a key method)."""
        if file_id not in tree:
            return None
        return tree.get_file_sha1(file_id)

    @staticmethod
    def executable(tree, file_id):
        """Determine the executability of a file-id (used as a key method)."""
        if file_id not in tree:
            return None
        if tree.kind(file_id) != "file":
            return False
        return tree.is_executable(file_id)

    @staticmethod
    def kind(tree, file_id):
        """Determine the kind of a file-id (used as a key method)."""
        if file_id not in tree:
            return None
        return tree.kind(file_id)

    @staticmethod
    def _three_way(base, other, this):
        # if base == other, either they all agree, or only THIS has changed.
        if base == other:
            return 'this'
        elif this not in (base, other):
            return 'conflict'
        # "Ambiguous clean merge" -- both sides have made the same change.
        elif this == other:
            return "this"
        # this == base: only other has changed.
        else:
            return "other"

    @staticmethod
    def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
        """Do a three-way test on a scalar.
        Return "this", "other" or "conflict", depending on whether a value wins.
        """
        key_base = key(base_tree, file_id)
        key_other = key(other_tree, file_id)
        # if base == other, either they all agree, or only THIS has changed.
        if key_base == key_other:
            return "this"
        key_this = key(this_tree, file_id)
        # "Ambiguous clean merge"
        if key_this == key_other:
            return "this"
        elif key_this == key_base:
            return "other"
        else:
            return "conflict"
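
    # Illustrative sketch (not part of the original module): the three-way
    # rule used by _three_way and scalar_three_way above, spelled out on
    # example values.
    #
    #   _three_way(base='a', other='a', this='b')  -> 'this'      # only THIS changed
    #   _three_way(base='a', other='b', this='a')  -> 'other'     # only OTHER changed
    #   _three_way(base='a', other='b', this='b')  -> 'this'      # both made the same change
    #   _three_way(base='a', other='b', this='c')  -> 'conflict'  # both changed, differently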
    def merge_names(self, file_id):
        def get_entry(tree):
            if file_id in tree.inventory:
                return tree.inventory[file_id]
            else:
                return None
        this_entry = get_entry(self.this_tree)
        other_entry = get_entry(self.other_tree)
        base_entry = get_entry(self.base_tree)
        entries = (base_entry, other_entry, this_entry)
        names = []
        parents = []
        for entry in entries:
            if entry is None:
                names.append(None)
                parents.append(None)
            else:
                names.append(entry.name)
                parents.append(entry.parent_id)
        return self._merge_names(file_id, parents, names)

    def _merge_names(self, file_id, parents, names):
        """Perform a merge on file_id names and parents"""
        base_name, other_name, this_name = names
        base_parent, other_parent, this_parent = parents

        name_winner = self._three_way(*names)

        parent_id_winner = self._three_way(*parents)
        if this_name is None:
            if name_winner == "this":
                name_winner = "other"
            if parent_id_winner == "this":
                parent_id_winner = "other"
        if name_winner == "this" and parent_id_winner == "this":
            return
        if name_winner == "conflict":
            trans_id = self.tt.trans_id_file_id(file_id)
            self._raw_conflicts.append(('name conflict', trans_id,
                                        this_name, other_name))
        if parent_id_winner == "conflict":
            trans_id = self.tt.trans_id_file_id(file_id)
            self._raw_conflicts.append(('parent conflict', trans_id,
                                        this_parent, other_parent))
        if other_name is None:
            # it doesn't matter whether the result was 'other' or
            # 'conflict'-- if there's no 'other', we leave it alone.
            return
        # if we get here, name_winner and parent_winner are set to safe values.
        trans_id = self.tt.trans_id_file_id(file_id)
        parent_id = parents[self.winner_idx[parent_id_winner]]
        if parent_id is not None:
            parent_trans_id = self.tt.trans_id_file_id(parent_id)
            self.tt.adjust_path(names[self.winner_idx[name_winner]],
                                parent_trans_id, trans_id)
    def merge_contents(self, file_id):
        """Perform a merge on file_id contents."""
        def contents_pair(tree):
            if file_id not in tree:
                return (None, None)
            kind = tree.kind(file_id)
            if kind == "file":
                contents = tree.get_file_sha1(file_id)
            elif kind == "symlink":
                contents = tree.get_symlink_target(file_id)
            else:
                contents = None
            return kind, contents

        def contents_conflict():
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            if file_id in self.this_tree.inventory:
                self.tt.unversion_file(trans_id)
                if file_id in self.this_tree:
                    self.tt.delete_contents(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              set_version=True)
            self._raw_conflicts.append(('contents conflict', file_group))

        # See SPOT run. run, SPOT, run.
        # So we're not QUITE repeating ourselves; we do tricky things with
        # file kinds, etc.
        base_pair = contents_pair(self.base_tree)
        other_pair = contents_pair(self.other_tree)
        if base_pair == other_pair:
            # OTHER introduced no changes
            return "unmodified"
        this_pair = contents_pair(self.this_tree)
        if this_pair == other_pair:
            # THIS and OTHER introduced the same changes
            return "unmodified"
        else:
            trans_id = self.tt.trans_id_file_id(file_id)
            if this_pair == base_pair:
                # only OTHER introduced changes
                if file_id in self.this_tree:
                    # Remove any existing contents
                    self.tt.delete_contents(trans_id)
                if file_id in self.other_tree:
                    # OTHER changed the file
                    create_by_entry(self.tt,
                                    self.other_tree.inventory[file_id],
                                    self.other_tree, trans_id)
                    if file_id not in self.this_tree.inventory:
                        self.tt.version_file(file_id, trans_id)
                    return "modified"
                elif file_id in self.this_tree.inventory:
                    # OTHER deleted the file
                    self.tt.unversion_file(trans_id)
                    return "deleted"
            # BOTH THIS and OTHER introduced changes; scalar conflict
            elif this_pair[0] == "file" and other_pair[0] == "file":
                # THIS and OTHER are both files, so text merge. Either
                # BASE is a file, or both converted to files, so at least we
                # have agreement that output should be a file.
                try:
                    self.text_merge(file_id, trans_id)
                except BinaryFile:
                    return contents_conflict()
                if file_id not in self.this_tree.inventory:
                    self.tt.version_file(file_id, trans_id)
                try:
                    self.tt.tree_kind(trans_id)
                    self.tt.delete_contents(trans_id)
                except NoSuchFile:
                    pass
                return "modified"
            else:
                # Scalar conflict, can't text merge. Dump conflicts
                return contents_conflict()
    def get_lines(self, tree, file_id):
        """Return the lines in a file, or an empty list."""
        if file_id in tree:
            return tree.get_file(file_id).readlines()
        else:
            return []

    def text_merge(self, file_id, trans_id):
        """Perform a three-way text merge on a file_id"""
        # it's possible that we got here with base as a different type.
        # if so, we just want two-way text conflicts.
        if file_id in self.base_tree and \
            self.base_tree.kind(file_id) == "file":
            base_lines = self.get_lines(self.base_tree, file_id)
        else:
            base_lines = []
        other_lines = self.get_lines(self.other_tree, file_id)
        this_lines = self.get_lines(self.this_tree, file_id)
        m3 = Merge3(base_lines, this_lines, other_lines,
                    is_cherrypick=self.cherrypick)
        start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
        if self.show_base is True:
            base_marker = '|' * 7
        else:
            base_marker = None

        def iter_merge3(retval):
            retval["text_conflicts"] = False
            for line in m3.merge_lines(name_a = "TREE",
                                       name_b = "MERGE-SOURCE",
                                       name_base = "BASE-REVISION",
                                       start_marker=start_marker,
                                       base_marker=base_marker,
                                       reprocess=self.reprocess):
                if line.startswith(start_marker):
                    retval["text_conflicts"] = True
                    yield line.replace(start_marker, '<' * 7)
                else:
                    yield line

        retval = {}
        merge3_iterator = iter_merge3(retval)
        self.tt.create_file(merge3_iterator, trans_id)
        if retval["text_conflicts"] is True:
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              this_lines, base_lines,
                                              other_lines)
            file_group.append(trans_id)
    def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
                        base_lines=None, other_lines=None, set_version=False,
                        no_base=False):
        """Emit conflict files.
        If this_lines, base_lines, or other_lines are omitted, they will be
        determined automatically. If set_version is true, the .OTHER, .THIS
        or .BASE (in that order) will be created as versioned files.
        """
        data = [('OTHER', self.other_tree, other_lines),
                ('THIS', self.this_tree, this_lines)]
        if not no_base:
            data.append(('BASE', self.base_tree, base_lines))
        versioned = False
        file_group = []
        for suffix, tree, lines in data:
            if file_id in tree:
                trans_id = self._conflict_file(name, parent_id, tree, file_id,
                                               suffix, lines)
                file_group.append(trans_id)
                if set_version and not versioned:
                    self.tt.version_file(file_id, trans_id)
                    versioned = True
        return file_group

    def _conflict_file(self, name, parent_id, tree, file_id, suffix,
                       lines=None):
        """Emit a single conflict file."""
        name = name + '.' + suffix
        trans_id = self.tt.create_path(name, parent_id)
        entry = tree.inventory[file_id]
        create_by_entry(self.tt, entry, tree, trans_id, lines)
        return trans_id
    def merge_executable(self, file_id, file_status):
        """Perform a merge on the execute bit."""
        executable = [self.executable(t, file_id) for t in (self.base_tree,
                      self.other_tree, self.this_tree)]
        self._merge_executable(file_id, executable, file_status)

    def _merge_executable(self, file_id, executable, file_status):
        """Perform a merge on the execute bit."""
        base_executable, other_executable, this_executable = executable
        if file_status == "deleted":
            return
        winner = self._three_way(*executable)
        if winner == "conflict":
            # There must be a None in here, if we have a conflict, but we
            # need executability since file status was not deleted.
            if self.executable(self.other_tree, file_id) is None:
                winner = "this"
            else:
                winner = "other"
        if winner == 'this' and file_status != "modified":
            return
        trans_id = self.tt.trans_id_file_id(file_id)
        try:
            if self.tt.final_kind(trans_id) != "file":
                return
        except NoSuchFile:
            return
        if winner == "this":
            executability = this_executable
        else:
            if file_id in self.other_tree:
                executability = other_executable
            elif file_id in self.this_tree:
                executability = this_executable
            elif file_id in self.base_tree:
                executability = base_executable
        if executability is not None:
            trans_id = self.tt.trans_id_file_id(file_id)
            self.tt.set_executability(executability, trans_id)
    def cook_conflicts(self, fs_conflicts):
        """Convert all conflicts into a form that doesn't depend on trans_id"""
        from bzrlib.conflicts import Conflict
        name_conflicts = {}
        self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
        fp = FinalPaths(self.tt)
        for conflict in self._raw_conflicts:
            conflict_type = conflict[0]
            if conflict_type in ('name conflict', 'parent conflict'):
                trans_id = conflict[1]
                conflict_args = conflict[2:]
                if trans_id not in name_conflicts:
                    name_conflicts[trans_id] = {}
                unique_add(name_conflicts[trans_id], conflict_type,
                           conflict_args)
            if conflict_type == 'contents conflict':
                for trans_id in conflict[1]:
                    file_id = self.tt.final_file_id(trans_id)
                    if file_id is not None:
                        break
                path = fp.get_path(trans_id)
                for suffix in ('.BASE', '.THIS', '.OTHER'):
                    if path.endswith(suffix):
                        path = path[:-len(suffix)]
                        break
                c = Conflict.factory(conflict_type, path=path, file_id=file_id)
                self.cooked_conflicts.append(c)
            if conflict_type == 'text conflict':
                trans_id = conflict[1]
                path = fp.get_path(trans_id)
                file_id = self.tt.final_file_id(trans_id)
                c = Conflict.factory(conflict_type, path=path, file_id=file_id)
                self.cooked_conflicts.append(c)

        for trans_id, conflicts in name_conflicts.iteritems():
            try:
                this_parent, other_parent = conflicts['parent conflict']
                if this_parent == other_parent:
                    raise AssertionError()
            except KeyError:
                this_parent = other_parent = \
                    self.tt.final_file_id(self.tt.final_parent(trans_id))
            try:
                this_name, other_name = conflicts['name conflict']
                if this_name == other_name:
                    raise AssertionError()
            except KeyError:
                this_name = other_name = self.tt.final_name(trans_id)
            other_path = fp.get_path(trans_id)
            if this_parent is not None and this_name is not None:
                this_parent_path = \
                    fp.get_path(self.tt.trans_id_file_id(this_parent))
                this_path = pathjoin(this_parent_path, this_name)
            else:
                this_path = "<deleted>"
            file_id = self.tt.final_file_id(trans_id)
            c = Conflict.factory('path conflict', path=this_path,
                                 conflict_path=other_path, file_id=file_id)
            self.cooked_conflicts.append(c)
        self.cooked_conflicts.sort(key=Conflict.sort_key)
class WeaveMerger(Merge3Merger):
    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False
    supports_reverse_cherrypick = False
    history_based = True

    def _merged_lines(self, file_id):
        """Generate the merged lines.
        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.
        """
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
                                              base=base)
        if 'merge' in debug.debug_flags:
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)

    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.
        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        """
        lines, conflicts = self._merged_lines(file_id)
        lines = list(lines)
        # Note we're checking whether the OUTPUT is binary in this case,
        # because we don't want to get into weave merge guts.
        check_text_lines(lines)
        self.tt.create_file(lines, trans_id)
        if conflicts:
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              no_base=True)
            file_group.append(trans_id)
class LCAMerger(WeaveMerger):

    def _merged_lines(self, file_id):
        """Generate the merged lines.
        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.
        """
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
                                                  base=base)
        if 'merge' in debug.debug_flags:
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)
class Diff3Merger(Merge3Merger):
    """Three-way merger using external diff3 for text merging"""

    def dump_file(self, temp_dir, name, tree, file_id):
        out_path = pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        try:
            in_file = tree.get_file(file_id)
            for line in in_file:
                out_file.write(line)
        finally:
            out_file.close()
        return out_path

    def text_merge(self, file_id, trans_id):
        """Perform a diff3 merge using a specified file-id and trans-id.
        If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
        import bzrlib.patch
        temp_dir = osutils.mkdtemp(prefix="bzr-")
        try:
            new_file = pathjoin(temp_dir, "new")
            this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
            base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
            other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
            status = bzrlib.patch.diff3(new_file, this, base, other)
            if status not in (0, 1):
                raise BzrError("Unhandled diff3 exit code")
            f = open(new_file, 'rb')
            try:
                self.tt.create_file(f, trans_id)
            finally:
                f.close()
            if status == 1:
                name = self.tt.final_name(trans_id)
                parent_id = self.tt.final_parent(trans_id)
                self._dump_conflicts(name, parent_id, file_id)
                self._raw_conflicts.append(('text conflict', trans_id))
        finally:
            osutils.rmtree(temp_dir)
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                interesting_ids=None,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                pb=DummyProgress(),
                change_reporter=None):
    """Primary interface for merging.

    typical use is probably
    'merge_inner(branch, branch.get_revision_tree(other_revision),
                 branch.get_revision_tree(base_revision))'
    """
    if this_tree is None:
        raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
                       "parameter as of bzrlib version 0.8.")
    merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
                    pb=pb, change_reporter=change_reporter)
    merger.backup_files = backup_files
    merger.merge_type = merge_type
    merger.interesting_ids = interesting_ids
    merger.ignore_zero = ignore_zero
    if interesting_files:
        if interesting_ids:
            raise ValueError('Only supply interesting_ids'
                             ' or interesting_files')
        merger.interesting_files = interesting_files
    merger.show_base = show_base
    merger.reprocess = reprocess
    merger.other_rev_id = other_rev_id
    merger.other_basis = other_rev_id
    get_revision_id = getattr(base_tree, 'get_revision_id', None)
    if get_revision_id is None:
        get_revision_id = base_tree.last_revision
    merger.set_base_revision(get_revision_id(), this_branch)
    return merger.do_merge()
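
# Illustrative sketch (not part of the original module): the typical use
# described in the merge_inner docstring, assuming `branch` is a Branch,
# `wt` is its WorkingTree, and the two revision ids are placeholders.
#
#   other_tree = branch.repository.revision_tree(other_rev_id)
#   base_tree = branch.repository.revision_tree(base_rev_id)
#   conflicts = merge_inner(branch, other_tree, base_tree, this_tree=wt)
#
# The return value is the number of cooked conflicts, as computed by
# Merger.do_merge().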
def get_merge_type_registry():
    """Merge type registry is in bzrlib.option to avoid circular imports.

    This method provides a sanctioned way to retrieve it.
    """
    from bzrlib import option
    return option._merge_type_registry
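
# Illustrative sketch (not part of the original module): looking up a merge
# type by name through the registry.  The registered names ('merge3',
# 'diff3', 'weave', 'lca') are assumptions based on the classes defined in
# this module.
#
#   registry = get_merge_type_registry()
#   merger.merge_type = registry.get('weave')   # -> WeaveMerger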
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
        else:
            return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
        else:
            return 'new-b', text

    plain_a = [t for (a, t) in annotated_a]
    plain_b = [t for (a, t) in annotated_b]
    matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
    blocks = matcher.get_matching_blocks()
    a_cur = 0
    b_cur = 0
    for ai, bi, l in blocks:
        # process all mismatched sections
        # (last mismatched section is handled because blocks always
        # includes a 0-length last block)
        for revision, text in annotated_a[a_cur:ai]:
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        lines = self.get_lines([a_rev, b_rev])
        self.lines_a = lines[a_rev]
        self.lines_b = lines[b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.split_lines(
                record.get_bytes_as('fulltext'))
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
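
    # Illustrative sketch (not part of the original module): the shape of the
    # plan produced by plan_merge().  Each element is a (state, line) pair,
    # where state is one of 'unchanged', 'new-a', 'new-b', 'killed-a',
    # 'killed-b', 'conflicted-a' or 'conflicted-b'.  For example, if revision
    # A appends "spam\n" to a common text "ham\n" while revision B leaves it
    # untouched, the plan would look like:
    #
    #   [('unchanged', 'ham\n'), ('new-a', 'spam\n')]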
    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i + n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n
    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j + n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
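
    # Illustrative sketch (not part of the original module): subtracting one
    # plan from another.  A 'new-b' entry shared with old_plan is dropped, a
    # shared 'killed-b' entry would become 'unchanged', and entries not
    # present in old_plan pass through verbatim.
    #
    #   old_plan = [('new-b', 'x\n')]
    #   new_plan = [('new-b', 'x\n'), ('new-a', 'y\n')]
    #   list(_PlanMergeBase._subtract_plans(old_plan, new_plan))
    #   # -> [('new-a', 'y\n')]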
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        graph = Graph(vf)
        # XXX: There is probably a better API to use to examine less history.
        a_ancestry = set(chain(*graph._make_breadth_first_searcher(
            [key_prefix + (a_rev,)])))
        b_ancestry = set(chain(*graph._make_breadth_first_searcher(
            [key_prefix + (b_rev,)])))
        self.uncommon = set(key[-1] for key in
                            a_ancestry.symmetric_difference(b_ancestry))

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new or killed, but not both.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return a tuple of (new_this, killed_other):
        """
        new = self._find_new(revision_id)
        killed = set(unique_line_numbers).difference(new)
        return new, killed

    def _find_new(self, version_id):
        """Determine which lines are new in the ancestry of this version.

        If a line is present in this version, and not present in any
        common ancestor, it is considered new.
        """
        if version_id not in self.uncommon:
            return set()
        key = self._key_prefix + (version_id,)
        parent_map = self.vf.get_parent_map([key])
        parents = tuple(parent[-1] for parent in parent_map[key])
        if len(parents) == 0:
            return set(range(len(self.get_lines([version_id])[version_id])))
        new = None
        for parent in parents:
            blocks = self._get_matching_blocks(version_id, parent)
            result, unused = self._unique_lines(blocks)
            parent_new = self._find_new(parent)
            for i, j, n in blocks:
                for ii, jj in [(i + r, j + r) for r in range(n)]:
                    if jj in parent_new:
                        result.append(ii)
            if new is None:
                new = set(result)
            else:
                new.intersection_update(result)
        return new
class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return a tuple of (new_this, killed_other):
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed