1
from merge_core import merge_flex, ApplyMerge3, BackupBeforeChange
2
from changeset import generate_changeset, ExceptionConflictHandler
3
from changeset import Inventory, Diff3Merge
4
from bzrlib import find_branch
6
from bzrlib.errors import BzrCommandError
7
from bzrlib.delta import compare_trees
8
from trace import mutter, warning
14
class UnrelatedBranches(BzrCommandError):
1
# Copyright (C) 2005, 2006, 2008 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
branch as _mod_branch,
20
conflicts as _mod_conflicts,
30
revision as _mod_revision,
39
from bzrlib.symbol_versioning import (
43
# TODO: Report back as changes are merged in
46
def transform_tree(from_tree, to_tree, interesting_ids=None):
47
from_tree.lock_tree_write()
49
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
50
interesting_ids=interesting_ids, this_tree=from_tree)
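# Illustrative usage sketch (not part of the original module; names are
# hypothetical): transform_tree makes one tree's contents match another's,
# for example reverting a working tree back to its basis revision:
#
#     wt = workingtree.WorkingTree.open('.')
#     transform_tree(wt, wt.basis_tree())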
55
class MergeHooks(hooks.Hooks):
def __init__(self):
16
msg = "Branches have no common ancestor, and no base revision"\
18
BzrCommandError.__init__(self, msg)
21
class MergeConflictHandler(ExceptionConflictHandler):
22
"""Handle conflicts encountered while merging"""
23
def __init__(self, dir, ignore_zero=False):
24
ExceptionConflictHandler.__init__(self, dir)
26
self.ignore_zero = ignore_zero
28
def copy(self, source, dest):
29
"""Copy the text and mode of a file
30
:param source: The path of the file to copy
31
:param dest: The destination file to create
33
s_file = file(source, "rb")
34
d_file = file(dest, "wb")
37
os.chmod(dest, 0777 & os.stat(source).st_mode)
39
def add_suffix(self, name, suffix, last_new_name=None):
40
"""Rename a file to append a suffix. If the new name exists, the
41
suffix is added repeatedly until a non-existent name is found
43
:param name: The path of the file
44
:param suffix: The suffix to append
45
:param last_new_name: (used for recursive calls) the last name tried
47
if last_new_name is None:
    last_new_name = name
new_name = last_new_name + suffix
51
os.rename(name, new_name)
54
if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
56
return self.add_suffix(name, suffix, last_new_name=new_name)
58
def conflict(self, text):
58
hooks.Hooks.__init__(self)
59
self.create_hook(hooks.HookPoint('merge_file_content',
60
"Called when file content needs to be merged (including when one "
61
"side has deleted the file and the other has changed it)."
62
"merge_file_content is called with a "
63
"bzrlib.merge.MergeHookParams. The function should return a tuple "
64
"of (status, lines), where status is one of 'not_applicable', "
65
"'success', 'conflicted', or 'delete'. If status is success or "
66
"conflicted, then lines should be an iterable of the new lines "
71
class MergeHookParams(object):
72
"""Object holding parameters passed to merge_file_content hooks.
74
There are 3 fields hooks can access:
76
:ivar merger: the Merger object
77
:ivar file_id: the file ID of the file being merged
78
:ivar trans_id: the transform ID for the merge of this file.
80
The lines of each version of the file being merged can be retrieved from the merger, for example:
83
params.merger.get_lines(params.merger.this_tree, params.file_id)
86
def __init__(self, merger, file_id, trans_id, this_pair, other_pair,
89
self.file_id = file_id
90
self.trans_id = trans_id
91
self.this_pair = this_pair
92
self.other_pair = other_pair
63
def merge_conflict(self, new_file, this_path, base_path, other_path):
65
Handle diff3 conflicts by producing .THIS, .BASE and .OTHER files. The
66
main file will be a version with diff3 conflicts.
67
:param new_file: Path to the output file with diff3 markers
68
:param this_path: Path to the file text for the THIS tree
69
:param base_path: Path to the file text for the BASE tree
70
:param other_path: Path to the file text for the OTHER tree
72
self.add_suffix(this_path, ".THIS")
73
self.copy(base_path, this_path+".BASE")
74
self.copy(other_path, this_path+".OTHER")
75
os.rename(new_file, this_path)
76
self.conflict("Diff3 conflict encountered in %s" % this_path)
78
def target_exists(self, entry, target, old_path):
79
"""Handle the case when the target file or dir exists"""
80
moved_path = self.add_suffix(target, ".moved")
81
self.conflict("Moved existing %s to %s" % (target, moved_path))
83
def rmdir_non_empty(self, filename):
84
"""Handle the case where the dir to be removed still has contents"""
85
self.conflict("Directory %s not removed because it is not empty"\
90
if not self.ignore_zero:
91
print "%d conflicts encountered.\n" % self.conflicts
93
class SourceFile(object):
94
def __init__(self, path, id, present=None, isdir=None):
    self.path = path
    self.id = id
    self.present = present
    self.isdir = isdir
    self.interesting = True
102
return "SourceFile(%s, %s)" % (self.path, self.id)
104
def get_tree(treespec, temp_root, label):
105
location, revno = treespec
106
branch = find_branch(location)
108
base_tree = branch.working_tree()
110
base_tree = branch.basis_tree()
112
base_tree = branch.revision_tree(branch.lookup_revision(revno))
113
temp_path = os.path.join(temp_root, label)
115
return branch, MergeTree(base_tree, temp_path)
118
def abspath(tree, file_id):
119
path = tree.inventory.id2path(file_id)
124
def file_exists(tree, file_id):
125
return tree.has_filename(tree.id2path(file_id))
95
def is_file_merge(self):
96
return self.this_pair[0] == 'file' and self.other_pair[0] == 'file'
127
def inventory_map(tree):
    """Return a dict mapping each path in the tree to a SourceFile."""
    inventory = {}
    for file_id in tree.inventory:
        path = abspath(tree, file_id)
        inventory[path] = SourceFile(path, file_id)
    return inventory
135
class MergeTree(object):
136
def __init__(self, tree, tempdir):
98
@decorators.cachedproperty
100
"""The lines of the 'base' version of the file."""
101
return self.merger.get_lines(self.merger.base_tree, self.file_id)
103
@decorators.cachedproperty
104
def this_lines(self):
105
"""The lines of the 'this' version of the file."""
106
return self.merger.get_lines(self.merger.this_tree, self.file_id)
108
@decorators.cachedproperty
109
def other_lines(self):
110
"""The lines of the 'other' version of the file."""
111
return self.merger.get_lines(self.merger.other_tree, self.file_id)
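# Illustrative sketch (not part of the original module): a merge_file_content
# hook can lean on the MergeHookParams properties above.  This hypothetical
# hook resolves the trivial case where THIS and OTHER already contain the
# same text, and otherwise declines so later hooks (or the default text
# merge) can run.
def _example_identical_text_hook(params):
    if not params.is_file_merge():
        # Only plain file/file merges are handled here.
        return 'not_applicable', None
    if params.this_lines == params.other_lines:
        # Both sides made the same change; keep it without conflict markers.
        return 'success', params.this_lines
    return 'not_applicable', None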
114
class Merger(object):
118
def __init__(self, this_branch, other_tree=None, base_tree=None,
119
this_tree=None, pb=None, change_reporter=None,
120
recurse='down', revision_graph=None):
object.__init__(self)
138
if hasattr(tree, "basedir"):
139
self.root = tree.basedir
142
self.inventory = inventory_map(tree)
144
self.tempdir = tempdir
145
os.mkdir(os.path.join(self.tempdir, "texts"))
148
def readonly_path(self, id):
149
if id not in self.tree:
151
if self.root is not None:
152
return self.tree.abspath(self.tree.id2path(id))
154
if self.tree.inventory[id].kind in ("directory", "root_directory"):
156
if id not in self.cached:
157
path = os.path.join(self.tempdir, "texts", id)
158
outfile = file(path, "wb")
159
outfile.write(self.tree.get_file(id).read())
160
assert(os.path.exists(path))
161
self.cached[id] = path
162
return self.cached[id]
166
def merge(other_revision, base_revision,
167
check_clean=True, ignore_zero=False,
168
this_dir=None, backup_files=False, merge_type=ApplyMerge3,
170
"""Merge changes into a tree.
173
Base for three-way merge.
175
Other revision for three-way merge.
177
Directory to merge changes into; '.' by default.
179
If true, this_dir must have no uncommitted changes before the
182
tempdir = tempfile.mkdtemp(prefix="bzr-")
186
this_branch = find_branch(this_dir)
122
self.this_branch = this_branch
123
self.this_basis = _mod_revision.ensure_null(
124
this_branch.last_revision())
125
self.this_rev_id = None
126
self.this_tree = this_tree
127
self.this_revision_tree = None
128
self.this_basis_tree = None
129
self.other_tree = other_tree
130
self.other_branch = None
131
self.base_tree = base_tree
132
self.ignore_zero = False
133
self.backup_files = False
134
self.interesting_ids = None
135
self.interesting_files = None
136
self.show_base = False
137
self.reprocess = False
139
pb = progress.DummyProgress()
142
self.recurse = recurse
143
self.change_reporter = change_reporter
144
self._cached_trees = {}
145
self._revision_graph = revision_graph
146
self._base_is_ancestor = None
147
self._base_is_other_ancestor = None
148
self._is_criss_cross = None
149
self._lca_trees = None
151
def cache_trees_with_revision_ids(self, trees):
152
"""Cache any tree in trees if it has a revision_id."""
153
for maybe_tree in trees:
154
if maybe_tree is None:
157
rev_id = maybe_tree.get_revision_id()
158
except AttributeError:
160
self._cached_trees[rev_id] = maybe_tree
163
def revision_graph(self):
164
if self._revision_graph is None:
165
self._revision_graph = self.this_branch.repository.get_graph()
166
return self._revision_graph
168
def _set_base_is_ancestor(self, value):
169
self._base_is_ancestor = value
171
def _get_base_is_ancestor(self):
172
if self._base_is_ancestor is None:
173
self._base_is_ancestor = self.revision_graph.is_ancestor(
174
self.base_rev_id, self.this_basis)
175
return self._base_is_ancestor
177
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
179
def _set_base_is_other_ancestor(self, value):
180
self._base_is_other_ancestor = value
182
def _get_base_is_other_ancestor(self):
183
if self._base_is_other_ancestor is None:
184
if self.other_basis is None:
186
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
187
self.base_rev_id, self.other_basis)
188
return self._base_is_other_ancestor
190
base_is_other_ancestor = property(_get_base_is_other_ancestor,
191
_set_base_is_other_ancestor)
194
def from_uncommitted(tree, other_tree, pb=None, base_tree=None):
195
"""Return a Merger for uncommitted changes in other_tree.
197
:param tree: The tree to merge into
198
:param other_tree: The tree to get uncommitted changes from
199
:param pb: A progress indicator
200
:param base_tree: The basis to use for the merge. If unspecified,
201
other_tree.basis_tree() will be used.
203
if base_tree is None:
204
base_tree = other_tree.basis_tree()
205
merger = Merger(tree.branch, other_tree, base_tree, tree, pb)
206
merger.base_rev_id = merger.base_tree.get_revision_id()
207
merger.other_rev_id = None
208
merger.other_basis = merger.base_rev_id
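# Illustrative usage sketch (not part of the original module; variable names
# are hypothetical): pull someone's uncommitted work into the current tree
# without merging any new revisions:
#
#     merger = Merger.from_uncommitted(my_tree, their_tree)
#     merger.merge_type = Merge3Merger
#     conflict_count = merger.do_merge()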
212
def from_mergeable(klass, tree, mergeable, pb):
213
"""Return a Merger for a bundle or merge directive.
215
:param tree: The tree to merge changes into
216
:param mergeable: A merge directive or bundle
217
:param pb: A progress indicator
219
mergeable.install_revisions(tree.branch.repository)
220
base_revision_id, other_revision_id, verified =\
221
mergeable.get_merge_request(tree.branch.repository)
222
revision_graph = tree.branch.repository.get_graph()
223
if base_revision_id is not None:
224
if (base_revision_id != _mod_revision.NULL_REVISION and
225
revision_graph.is_ancestor(
226
base_revision_id, tree.branch.last_revision())):
227
base_revision_id = None
229
trace.warning('Performing cherrypick')
230
merger = klass.from_revision_ids(pb, tree, other_revision_id,
231
base_revision_id, revision_graph=
233
return merger, verified
236
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
237
base_branch=None, revision_graph=None,
239
"""Return a Merger for revision-ids.
241
:param pb: A progress indicator
242
:param tree: The tree to merge changes into
243
:param other: The revision-id to use as OTHER
244
:param base: The revision-id to use as BASE. If not specified, will
246
:param other_branch: A branch containing the other revision-id. If
247
not supplied, tree.branch is used.
248
:param base_branch: A branch containing the base revision-id. If
249
not supplied, other_branch or tree.branch will be used.
250
:param revision_graph: If you have a revision_graph precomputed, pass
251
it in, otherwise it will be created for you.
252
:param tree_branch: The branch associated with tree. If not supplied,
253
tree.branch will be used.
255
if tree_branch is None:
256
tree_branch = tree.branch
257
merger = Merger(tree_branch, this_tree=tree, pb=pb,
258
revision_graph=revision_graph)
259
if other_branch is None:
260
other_branch = tree.branch
261
merger.set_other_revision(other, other_branch)
265
if base_branch is None:
266
base_branch = other_branch
267
merger.set_base_revision(base, base_branch)
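# Illustrative usage sketch (not part of the original module; variable names
# are hypothetical): merge the tip of another branch into a working tree:
#
#     merger = Merger.from_revision_ids(None, tree,
#         other_branch.last_revision(), other_branch=other_branch)
#     merger.merge_type = Merge3Merger
#     merger.do_merge()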
270
def revision_tree(self, revision_id, branch=None):
271
if revision_id not in self._cached_trees:
273
branch = self.this_branch
275
tree = self.this_tree.revision_tree(revision_id)
276
except errors.NoSuchRevisionInTree:
277
tree = branch.repository.revision_tree(revision_id)
278
self._cached_trees[revision_id] = tree
279
return self._cached_trees[revision_id]
281
def _get_tree(self, treespec, possible_transports=None):
282
from bzrlib import workingtree
283
location, revno = treespec
285
tree = workingtree.WorkingTree.open_containing(location)[0]
286
return tree.branch, tree
287
branch = _mod_branch.Branch.open_containing(
288
location, possible_transports)[0]
290
revision_id = branch.last_revision()
292
revision_id = branch.get_rev_id(revno)
293
revision_id = _mod_revision.ensure_null(revision_id)
294
return branch, self.revision_tree(revision_id, branch)
296
@deprecated_method(deprecated_in((2, 1, 0)))
297
def ensure_revision_trees(self):
298
if self.this_revision_tree is None:
299
self.this_basis_tree = self.revision_tree(self.this_basis)
300
if self.this_basis == self.this_rev_id:
301
self.this_revision_tree = self.this_basis_tree
303
if self.other_rev_id is None:
304
other_basis_tree = self.revision_tree(self.other_basis)
305
if other_basis_tree.has_changes(self.other_tree):
306
raise errors.WorkingTreeNotRevision(self.this_tree)
307
other_rev_id = self.other_basis
308
self.other_tree = other_basis_tree
310
@deprecated_method(deprecated_in((2, 1, 0)))
311
def file_revisions(self, file_id):
312
self.ensure_revision_trees()
313
def get_id(tree, file_id):
314
revision_id = tree.inventory[file_id].revision
316
if self.this_rev_id is None:
317
if self.this_basis_tree.get_file_sha1(file_id) != \
318
self.this_tree.get_file_sha1(file_id):
319
raise errors.WorkingTreeNotRevision(self.this_tree)
321
trees = (self.this_basis_tree, self.other_tree)
322
return [get_id(tree, file_id) for tree in trees]
324
@deprecated_method(deprecated_in((2, 1, 0)))
325
def check_basis(self, check_clean, require_commits=True):
326
if self.this_basis is None and require_commits is True:
327
raise errors.BzrCommandError(
328
"This branch has no commits."
329
" (perhaps you would prefer 'bzr pull')")
188
changes = compare_trees(this_branch.working_tree(),
189
this_branch.basis_tree(), False)
190
if changes.has_changed():
191
raise BzrCommandError("Working tree has uncommitted changes.")
192
other_branch, other_tree = get_tree(other_revision, tempdir, "other")
332
if self.this_basis != self.this_rev_id:
333
raise errors.UncommittedChanges(self.this_tree)
335
@deprecated_method(deprecated_in((2, 1, 0)))
336
def compare_basis(self):
338
basis_tree = self.revision_tree(self.this_tree.last_revision())
339
except errors.NoSuchRevision:
340
basis_tree = self.this_tree.basis_tree()
341
if not self.this_tree.has_changes(basis_tree):
342
self.this_rev_id = self.this_basis
344
def set_interesting_files(self, file_list):
345
self.interesting_files = file_list
347
def set_pending(self):
348
if (not self.base_is_ancestor or not self.base_is_other_ancestor
349
or self.other_rev_id is None):
353
def _add_parent(self):
354
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
355
new_parent_trees = []
356
for revision_id in new_parents:
358
tree = self.revision_tree(revision_id)
359
except errors.NoSuchRevision:
363
new_parent_trees.append((revision_id, tree))
365
self.this_tree.set_parent_trees(new_parent_trees,
366
allow_leftmost_as_ghost=True)
368
for _revision_id, tree in new_parent_trees:
372
def set_other(self, other_revision, possible_transports=None):
373
"""Set the revision and tree to merge from.
375
This sets the other_tree, other_rev_id, other_basis attributes.
377
:param other_revision: The [path, revision] list to merge from.
379
self.other_branch, self.other_tree = self._get_tree(other_revision,
381
if other_revision[1] == -1:
382
self.other_rev_id = _mod_revision.ensure_null(
383
self.other_branch.last_revision())
384
if _mod_revision.is_null(self.other_rev_id):
385
raise errors.NoCommits(self.other_branch)
386
self.other_basis = self.other_rev_id
387
elif other_revision[1] is not None:
388
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
389
self.other_basis = self.other_rev_id
391
self.other_rev_id = None
392
self.other_basis = self.other_branch.last_revision()
393
if self.other_basis is None:
394
raise errors.NoCommits(self.other_branch)
395
if self.other_rev_id is not None:
396
self._cached_trees[self.other_rev_id] = self.other_tree
397
self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)
399
def set_other_revision(self, revision_id, other_branch):
400
"""Set 'other' based on a branch and revision id
402
:param revision_id: The revision to use for a tree
403
:param other_branch: The branch containing this tree
405
self.other_rev_id = revision_id
406
self.other_branch = other_branch
407
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
408
self.other_tree = self.revision_tree(revision_id)
409
self.other_basis = revision_id
411
def set_base_revision(self, revision_id, branch):
412
"""Set 'base' based on a branch and revision id
414
:param revision_id: The revision to use for a tree
415
:param branch: The branch containing this tree
417
self.base_rev_id = revision_id
418
self.base_branch = branch
419
self._maybe_fetch(branch, self.this_branch, revision_id)
420
self.base_tree = self.revision_tree(revision_id)
422
def _maybe_fetch(self, source, target, revision_id):
423
if not source.repository.has_same_location(target.repository):
424
target.fetch(source, revision_id)
427
revisions = [_mod_revision.ensure_null(self.this_basis),
428
_mod_revision.ensure_null(self.other_basis)]
429
if _mod_revision.NULL_REVISION in revisions:
430
self.base_rev_id = _mod_revision.NULL_REVISION
431
self.base_tree = self.revision_tree(self.base_rev_id)
432
self._is_criss_cross = False
434
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
435
self._is_criss_cross = False
437
self.base_rev_id = _mod_revision.NULL_REVISION
439
self.base_rev_id = list(lcas)[0]
440
else: # len(lcas) > 1
442
# find_unique_lca can only handle 2 nodes, so we have to
443
# start back at the beginning. It is a shame to traverse
444
# the graph again, but better than re-implementing
446
self.base_rev_id = self.revision_graph.find_unique_lca(
447
revisions[0], revisions[1])
449
self.base_rev_id = self.revision_graph.find_unique_lca(
451
self._is_criss_cross = True
452
if self.base_rev_id == _mod_revision.NULL_REVISION:
453
raise errors.UnrelatedBranches()
454
if self._is_criss_cross:
455
trace.warning('Warning: criss-cross merge encountered. See bzr'
456
' help criss-cross.')
457
trace.mutter('Criss-cross lcas: %r' % lcas)
458
interesting_revision_ids = [self.base_rev_id]
459
interesting_revision_ids.extend(lcas)
460
interesting_trees = dict((t.get_revision_id(), t)
461
for t in self.this_branch.repository.revision_trees(
462
interesting_revision_ids))
463
self._cached_trees.update(interesting_trees)
464
self.base_tree = interesting_trees.pop(self.base_rev_id)
465
sorted_lca_keys = self.revision_graph.find_merge_order(
467
self._lca_trees = [interesting_trees[key]
468
for key in sorted_lca_keys]
470
self.base_tree = self.revision_tree(self.base_rev_id)
471
self.base_is_ancestor = True
472
self.base_is_other_ancestor = True
473
trace.mutter('Base revid: %r' % self.base_rev_id)
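# Illustrative sketch of the criss-cross case handled above (revision names
# are hypothetical).  With a history such as
#
#     A
#    / \
#   B   C
#   |\ /|
#   | X |
#   |/ \|
#   D   E      (THIS and OTHER)
#
# find_lca(D, E) returns two revisions (B and C), so the base is taken as
# their unique LCA (A), _is_criss_cross is set, and the trees for B and C are
# kept in self._lca_trees for the LCA-aware merge path.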
475
def set_base(self, base_revision):
476
"""Set the base revision to use for the merge.
478
:param base_revision: A 2-list containing a path and revision number.
480
trace.mutter("doing merge() with no base_revision specified")
if base_revision == [None, None]:
194
if other_revision[1] == -1:
197
o_revno = other_revision[1]
198
base_revno = this_branch.common_ancestor(other_branch,
199
other_revno=o_revno)[0]
200
if base_revno is None:
201
raise UnrelatedBranches()
202
base_revision = ['.', base_revno]
203
base_branch, base_tree = get_tree(base_revision, tempdir, "base")
204
if file_list is None:
205
interesting_ids = None
207
interesting_ids = set()
208
this_tree = this_branch.working_tree()
209
for fname in file_list:
210
path = this_branch.relpath(fname)
212
for tree in (this_tree, base_tree.tree, other_tree.tree):
213
file_id = tree.inventory.path2id(path)
484
base_branch, self.base_tree = self._get_tree(base_revision)
485
if base_revision[1] == -1:
486
self.base_rev_id = base_branch.last_revision()
487
elif base_revision[1] is None:
488
self.base_rev_id = _mod_revision.NULL_REVISION
490
self.base_rev_id = _mod_revision.ensure_null(
491
base_branch.get_rev_id(base_revision[1]))
492
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
494
def make_merger(self):
495
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
496
'other_tree': self.other_tree,
497
'interesting_ids': self.interesting_ids,
498
'interesting_files': self.interesting_files,
501
if self.merge_type.requires_base:
502
kwargs['base_tree'] = self.base_tree
503
if self.merge_type.supports_reprocess:
504
kwargs['reprocess'] = self.reprocess
506
raise errors.BzrError(
507
"Conflict reduction is not supported for merge"
508
" type %s." % self.merge_type)
509
if self.merge_type.supports_show_base:
510
kwargs['show_base'] = self.show_base
512
raise errors.BzrError("Showing base is not supported for this"
513
" merge type. %s" % self.merge_type)
514
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
515
and not self.base_is_other_ancestor):
516
raise errors.CannotReverseCherrypick()
517
if self.merge_type.supports_cherrypick:
518
kwargs['cherrypick'] = (not self.base_is_ancestor or
519
not self.base_is_other_ancestor)
520
if self._is_criss_cross and getattr(self.merge_type,
521
'supports_lca_trees', False):
522
kwargs['lca_trees'] = self._lca_trees
523
return self.merge_type(pb=self._pb,
524
change_reporter=self.change_reporter,
527
def _do_merge_to(self, merge):
528
if self.other_branch is not None:
529
self.other_branch.update_references(self.this_branch)
531
if self.recurse == 'down':
532
for relpath, file_id in self.this_tree.iter_references():
533
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
534
other_revision = self.other_tree.get_reference_revision(
536
if other_revision == sub_tree.last_revision():
538
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
539
sub_merge.merge_type = self.merge_type
540
other_branch = self.other_branch.reference_parent(file_id, relpath)
541
sub_merge.set_other_revision(other_revision, other_branch)
542
base_revision = self.base_tree.get_reference_revision(file_id)
543
sub_merge.base_tree = \
544
sub_tree.branch.repository.revision_tree(base_revision)
545
sub_merge.base_rev_id = base_revision
549
self.this_tree.lock_tree_write()
551
if self.base_tree is not None:
552
self.base_tree.lock_read()
554
if self.other_tree is not None:
555
self.other_tree.lock_read()
557
merge = self.make_merger()
558
self._do_merge_to(merge)
560
if self.other_tree is not None:
561
self.other_tree.unlock()
563
if self.base_tree is not None:
564
self.base_tree.unlock()
566
self.this_tree.unlock()
567
if len(merge.cooked_conflicts) == 0:
568
if not self.ignore_zero and not trace.is_quiet():
569
trace.note("All changes applied successfully.")
571
trace.note("%d conflicts encountered."
572
% len(merge.cooked_conflicts))
574
return len(merge.cooked_conflicts)
577
class _InventoryNoneEntry(object):
578
"""This represents an inventory entry which *isn't there*.
580
It simplifies the merging logic if we always have an InventoryEntry, even
581
if it isn't actually present
588
symlink_target = None
591
_none_entry = _InventoryNoneEntry()
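# Illustrative sketch (not part of the original module): _none_entry lets the
# per-attribute comparisons below treat "missing from this tree" uniformly,
# so callers can always read .kind/.name/.parent_id/.executable without None
# checks.  The helper name is hypothetical; _entries_lca does the equivalent
# inline.
def _example_entry_or_none(inventory, file_id):
    if file_id in inventory:
        return inventory[file_id]
    return _none_entry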
594
class Merge3Merger(object):
595
"""Three-way merger that uses the merge3 text merger"""
597
supports_reprocess = True
598
supports_show_base = True
599
history_based = False
600
supports_cherrypick = True
601
supports_reverse_cherrypick = True
602
winner_idx = {"this": 2, "other": 1, "conflict": 1}
603
supports_lca_trees = True
605
def __init__(self, working_tree, this_tree, base_tree, other_tree,
606
interesting_ids=None, reprocess=False, show_base=False,
607
pb=progress.DummyProgress(), pp=None, change_reporter=None,
608
interesting_files=None, do_merge=True,
609
cherrypick=False, lca_trees=None):
610
"""Initialize the merger object and perform the merge.
612
:param working_tree: The working tree to apply the merge to
613
:param this_tree: The local tree in the merge operation
614
:param base_tree: The common tree in the merge operation
615
:param other_tree: The other tree to merge changes from
616
:param interesting_ids: The file_ids of files that should
participate in the merge. May not be combined with
619
:param reprocess: If True, perform conflict-reduction processing.
620
:param show_base: If True, show the base revision in text conflicts.
621
(incompatible with reprocess)
622
:param pb: A Progress bar
623
:param pp: A ProgressPhase object
624
:param change_reporter: An object that should report changes made
625
:param interesting_files: The tree-relative paths of files that should
626
participate in the merge. If these paths refer to directories,
627
the contents of those directories will also be included. May not
628
be combined with interesting_ids. If neither interesting_files nor
629
interesting_ids is specified, all files may participate in the
631
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
632
if the ancestry was found to include a criss-cross merge.
633
Otherwise should be None.
635
object.__init__(self)
636
if interesting_files is not None and interesting_ids is not None:
638
'specify either interesting_ids or interesting_files')
639
self.interesting_ids = interesting_ids
640
self.interesting_files = interesting_files
641
self.this_tree = working_tree
642
self.base_tree = base_tree
643
self.other_tree = other_tree
644
self._raw_conflicts = []
645
self.cooked_conflicts = []
646
self.reprocess = reprocess
647
self.show_base = show_base
648
self._lca_trees = lca_trees
649
# Uncommenting this will change the default algorithm to always use
650
# _entries_lca. This can be useful for running the test suite and
651
# making sure we haven't missed any corner cases.
652
# if lca_trees is None:
653
# self._lca_trees = [self.base_tree]
656
self.change_reporter = change_reporter
657
self.cherrypick = cherrypick
659
self.pp = progress.ProgressPhase("Merge phase", 3, self.pb)
664
self.this_tree.lock_tree_write()
665
self.base_tree.lock_read()
666
self.other_tree.lock_read()
668
self.tt = transform.TreeTransform(self.this_tree, self.pb)
671
self._compute_transform()
673
results = self.tt.apply(no_conflicts=True)
674
self.write_modified(results)
676
self.this_tree.add_conflicts(self.cooked_conflicts)
677
except errors.UnsupportedOperation:
682
self.other_tree.unlock()
683
self.base_tree.unlock()
684
self.this_tree.unlock()
687
def make_preview_transform(self):
688
self.base_tree.lock_read()
689
self.other_tree.lock_read()
690
self.tt = transform.TransformPreview(self.this_tree)
693
self._compute_transform()
696
self.other_tree.unlock()
697
self.base_tree.unlock()
701
def _compute_transform(self):
702
if self._lca_trees is None:
703
entries = self._entries3()
704
resolver = self._three_way
706
entries = self._entries_lca()
707
resolver = self._lca_multi_way
708
child_pb = ui.ui_factory.nested_progress_bar()
710
for num, (file_id, changed, parents3, names3,
711
executable3) in enumerate(entries):
712
child_pb.update('Preparing file merge', num, len(entries))
713
self._merge_names(file_id, parents3, names3, resolver=resolver)
715
file_status = self.merge_contents(file_id)
717
file_status = 'unmodified'
718
self._merge_executable(file_id,
719
executable3, file_status, resolver=resolver)
724
child_pb = ui.ui_factory.nested_progress_bar()
726
fs_conflicts = transform.resolve_conflicts(self.tt, child_pb,
727
lambda t, c: transform.conflict_pass(t, c, self.other_tree))
730
if self.change_reporter is not None:
731
from bzrlib import delta
732
delta.report_changes(
733
self.tt.iter_changes(), self.change_reporter)
734
self.cook_conflicts(fs_conflicts)
735
for conflict in self.cooked_conflicts:
736
trace.warning(conflict)
739
"""Gather data about files modified between three trees.
741
Return a list of tuples of file_id, changed, parents3, names3,
742
executable3. changed is a boolean indicating whether the file contents
743
or kind were changed. parents3 is a tuple of parent ids for base,
744
other and this. names3 is a tuple of names for base, other and this.
745
executable3 is a tuple of execute-bit values for base, other and this.
748
iterator = self.other_tree.iter_changes(self.base_tree,
749
include_unchanged=True, specific_files=self.interesting_files,
750
extra_trees=[self.this_tree])
751
this_entries = dict((e.file_id, e) for p, e in
752
self.this_tree.iter_entries_by_dir(
753
self.interesting_ids))
754
for (file_id, paths, changed, versioned, parents, names, kind,
755
executable) in iterator:
756
if (self.interesting_ids is not None and
757
file_id not in self.interesting_ids):
759
entry = this_entries.get(file_id)
760
if entry is not None:
761
this_name = entry.name
762
this_parent = entry.parent_id
763
this_executable = entry.executable
767
this_executable = None
768
parents3 = parents + (this_parent,)
769
names3 = names + (this_name,)
770
executable3 = executable + (this_executable,)
771
result.append((file_id, changed, parents3, names3, executable3))
774
def _entries_lca(self):
775
"""Gather data about files modified between multiple trees.
777
This compares OTHER versus all LCA trees, and for interesting entries,
778
it then compares with THIS and BASE.
780
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
781
:return: [(file_id, changed, parents, names, executable)]
782
file_id Simple file_id of the entry
783
changed Boolean, True if the kind or contents changed
785
parents ((base, [parent_id, in, lcas]), parent_id_other,
787
names ((base, [name, in, lcas]), name_in_other, name_in_this)
788
executable ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
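For example, a single element might look like this (all values purely
illustrative):

    ('file-id-1', True,
     (('base-parent', ['lca1-parent', 'lca2-parent']),
      'other-parent', 'this-parent'),
     (('base-name', ['lca1-name', 'lca2-name']),
      'other-name', 'this-name'),
     ((False, [False, True]), True, False))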
790
if self.interesting_files is not None:
791
lookup_trees = [self.this_tree, self.base_tree]
792
lookup_trees.extend(self._lca_trees)
793
# I think we should include the lca trees as well
794
interesting_ids = self.other_tree.paths2ids(self.interesting_files,
797
interesting_ids = self.interesting_ids
799
walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)
801
base_inventory = self.base_tree.inventory
802
this_inventory = self.this_tree.inventory
803
for path, file_id, other_ie, lca_values in walker.iter_all():
804
# Is this modified at all from any of the other trees?
806
other_ie = _none_entry
807
if interesting_ids is not None and file_id not in interesting_ids:
810
# If other_revision is found in any of the lcas, that means this
811
# node is uninteresting. This is because when merging, if there are
812
# multiple heads(), we have to create a new node. So if we didn't,
813
# we know that the ancestry is linear, and that OTHER did not
815
# See doc/developers/lca_merge_resolution.txt for details
816
other_revision = other_ie.revision
817
if other_revision is not None:
818
# We can't use this shortcut when other_revision is None,
819
# because it may be None because things are WorkingTrees, and
820
# not because it is *actually* None.
821
is_unmodified = False
822
for lca_path, ie in lca_values:
823
if ie is not None and ie.revision == other_revision:
830
for lca_path, lca_ie in lca_values:
832
lca_entries.append(_none_entry)
834
lca_entries.append(lca_ie)
836
if file_id in base_inventory:
837
base_ie = base_inventory[file_id]
839
base_ie = _none_entry
841
if file_id in this_inventory:
842
this_ie = this_inventory[file_id]
844
this_ie = _none_entry
850
for lca_ie in lca_entries:
851
lca_kinds.append(lca_ie.kind)
852
lca_parent_ids.append(lca_ie.parent_id)
853
lca_names.append(lca_ie.name)
854
lca_executable.append(lca_ie.executable)
856
kind_winner = self._lca_multi_way(
857
(base_ie.kind, lca_kinds),
858
other_ie.kind, this_ie.kind)
859
parent_id_winner = self._lca_multi_way(
860
(base_ie.parent_id, lca_parent_ids),
861
other_ie.parent_id, this_ie.parent_id)
862
name_winner = self._lca_multi_way(
863
(base_ie.name, lca_names),
864
other_ie.name, this_ie.name)
866
content_changed = True
867
if kind_winner == 'this':
868
# No kind change in OTHER, see if there are *any* changes
869
if other_ie.kind == 'directory':
870
if parent_id_winner == 'this' and name_winner == 'this':
871
# No change for this directory in OTHER, skip
873
content_changed = False
874
elif other_ie.kind is None or other_ie.kind == 'file':
875
def get_sha1(ie, tree):
876
if ie.kind != 'file':
878
return tree.get_file_sha1(file_id)
879
base_sha1 = get_sha1(base_ie, self.base_tree)
880
lca_sha1s = [get_sha1(ie, tree) for ie, tree
881
in zip(lca_entries, self._lca_trees)]
882
this_sha1 = get_sha1(this_ie, self.this_tree)
883
other_sha1 = get_sha1(other_ie, self.other_tree)
884
sha1_winner = self._lca_multi_way(
885
(base_sha1, lca_sha1s), other_sha1, this_sha1,
886
allow_overriding_lca=False)
887
exec_winner = self._lca_multi_way(
888
(base_ie.executable, lca_executable),
889
other_ie.executable, this_ie.executable)
890
if (parent_id_winner == 'this' and name_winner == 'this'
891
and sha1_winner == 'this' and exec_winner == 'this'):
892
# No kind, parent, name, exec, or content change for
893
# OTHER, so this node is not considered interesting
895
if sha1_winner == 'this':
896
content_changed = False
897
elif other_ie.kind == 'symlink':
898
def get_target(ie, tree):
899
if ie.kind != 'symlink':
901
return tree.get_symlink_target(file_id)
902
base_target = get_target(base_ie, self.base_tree)
903
lca_targets = [get_target(ie, tree) for ie, tree
904
in zip(lca_entries, self._lca_trees)]
905
this_target = get_target(this_ie, self.this_tree)
906
other_target = get_target(other_ie, self.other_tree)
907
target_winner = self._lca_multi_way(
908
(base_target, lca_targets),
909
other_target, this_target)
910
if (parent_id_winner == 'this' and name_winner == 'this'
911
and target_winner == 'this'):
912
# No kind, parent, name, or symlink target change
915
if target_winner == 'this':
916
content_changed = False
917
elif other_ie.kind == 'tree-reference':
918
# The 'changed' information seems to be handled at a higher
919
# level. At least, _entries3 returns False for content
920
# changed, even when at a new revision_id.
921
content_changed = False
922
if (parent_id_winner == 'this' and name_winner == 'this'):
923
# Nothing interesting
926
raise AssertionError('unhandled kind: %s' % other_ie.kind)
927
# XXX: We need to handle kind == 'symlink'
929
# If we have gotten this far, that means something has changed
930
result.append((file_id, content_changed,
931
((base_ie.parent_id, lca_parent_ids),
932
other_ie.parent_id, this_ie.parent_id),
933
((base_ie.name, lca_names),
934
other_ie.name, this_ie.name),
935
((base_ie.executable, lca_executable),
936
other_ie.executable, this_ie.executable)
943
self.tt.final_kind(self.tt.root)
944
except errors.NoSuchFile:
945
self.tt.cancel_deletion(self.tt.root)
946
if self.tt.final_file_id(self.tt.root) is None:
947
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
949
other_root_file_id = self.other_tree.get_root_id()
950
if other_root_file_id is None:
952
other_root = self.tt.trans_id_file_id(other_root_file_id)
953
if other_root == self.tt.root:
956
self.tt.final_kind(other_root)
957
except errors.NoSuchFile:
959
if self.other_tree.inventory.root.file_id in self.this_tree.inventory:
960
# the other tree's root is a non-root in the current tree
962
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
963
self.tt.cancel_creation(other_root)
964
self.tt.cancel_versioning(other_root)
966
def reparent_children(self, ie, target):
967
for thing, child in ie.children.iteritems():
968
trans_id = self.tt.trans_id_file_id(child.file_id)
969
self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
971
def write_modified(self, results):
973
for path in results.modified_paths:
974
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
977
hash = self.this_tree.get_file_sha1(file_id)
980
modified_hashes[file_id] = hash
981
self.this_tree.set_merge_modified(modified_hashes)
984
def parent(entry, file_id):
985
"""Determine the parent for a file_id (used as a key method)"""
988
return entry.parent_id
991
def name(entry, file_id):
992
"""Determine the name for a file_id (used as a key method)"""
998
def contents_sha1(tree, file_id):
999
"""Determine the sha1 of the file contents (used as a key method)."""
1000
if file_id not in tree:
1002
return tree.get_file_sha1(file_id)
1005
def executable(tree, file_id):
1006
"""Determine the executability of a file-id (used as a key method)."""
1007
if file_id not in tree:
1009
if tree.kind(file_id) != "file":
1011
return tree.is_executable(file_id)
1014
def kind(tree, file_id):
1015
"""Determine the kind of a file-id (used as a key method)."""
1016
if file_id not in tree:
1018
return tree.kind(file_id)
1021
def _three_way(base, other, this):
1022
#if base == other, either they all agree, or only THIS has changed.
1025
elif this not in (base, other):
1027
# "Ambiguous clean merge" -- both sides have made the same change.
1030
# this == base: only other has changed.
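# Worked examples (illustrative) with (base, other, this):
#   ('a', 'a', 'b') -> 'this'      (only THIS changed)
#   ('a', 'b', 'a') -> 'other'     (only OTHER changed)
#   ('a', 'b', 'b') -> 'this'      (ambiguous clean merge: same change)
#   ('a', 'b', 'c') -> 'conflict'  (both sides changed, differently)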
1035
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
1036
"""Consider LCAs when determining whether a change has occurred.
1038
If the LCAs are all identical, this is the same as a _three_way comparison.
1040
:param bases: value in (BASE, [LCAS])
1041
:param other: value in OTHER
1042
:param this: value in THIS
1043
:param allow_overriding_lca: If there is more than one unique lca
1044
value, allow OTHER to override THIS if it has a new value, and
1045
THIS only has an lca value, or vice versa. This is appropriate for
1046
truly scalar values, not as much for non-scalars.
1047
:return: 'this', 'other', or 'conflict' depending on whether an entry
1050
# See doc/developers/lca_tree_merging.txt for details about this
1053
# Either Ambiguously clean, or nothing was actually changed. We
1056
base_val, lca_vals = bases
1057
# Remove 'base_val' from the lca_vals, because it is not interesting
1058
filtered_lca_vals = [lca_val for lca_val in lca_vals
1059
if lca_val != base_val]
1060
if len(filtered_lca_vals) == 0:
1061
return Merge3Merger._three_way(base_val, other, this)
1063
unique_lca_vals = set(filtered_lca_vals)
1064
if len(unique_lca_vals) == 1:
1065
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
1067
if allow_overriding_lca:
1068
if other in unique_lca_vals:
1069
if this in unique_lca_vals:
1070
# Each side picked a different lca, conflict
1073
# This has a value which supersedes both lca values, and
1074
# other only has an lca value
1076
elif this in unique_lca_vals:
1077
# OTHER has a value which supersedes both lca values, and this
1078
# only has an lca value
1081
# At this point, the lcas disagree, and the tips disagree
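# Worked examples (illustrative) with bases = ('base', ['lca1', 'lca2']):
#   other='lca1', this='new'  -> 'this'     (THIS supersedes both LCA values)
#   other='new',  this='lca2' -> 'other'    (OTHER supersedes both LCA values)
#   other='lca1', this='lca2' -> 'conflict' (each side kept a different LCA)
#   other='new1', this='new2' -> 'conflict' (LCAs and tips all disagree)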
1085
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
1086
"""Do a three-way test on a scalar.
1087
Return "this", "other" or "conflict", depending whether a value wins.
1089
key_base = key(base_tree, file_id)
1090
key_other = key(other_tree, file_id)
1091
#if base == other, either they all agree, or only THIS has changed.
1092
if key_base == key_other:
1094
key_this = key(this_tree, file_id)
1095
# "Ambiguous clean merge"
1096
if key_this == key_other:
1098
elif key_this == key_base:
1103
def merge_names(self, file_id):
1104
def get_entry(tree):
1105
if file_id in tree.inventory:
1106
return tree.inventory[file_id]
1109
this_entry = get_entry(self.this_tree)
1110
other_entry = get_entry(self.other_tree)
1111
base_entry = get_entry(self.base_tree)
1112
entries = (base_entry, other_entry, this_entry)
1115
for entry in entries:
1118
parents.append(None)
1120
names.append(entry.name)
1121
parents.append(entry.parent_id)
1122
return self._merge_names(file_id, parents, names,
1123
resolver=self._three_way)
1125
def _merge_names(self, file_id, parents, names, resolver):
1126
"""Perform a merge on file_id names and parents"""
1127
base_name, other_name, this_name = names
1128
base_parent, other_parent, this_parent = parents
1130
name_winner = resolver(*names)
1132
parent_id_winner = resolver(*parents)
1133
if this_name is None:
1134
if name_winner == "this":
1135
name_winner = "other"
1136
if parent_id_winner == "this":
1137
parent_id_winner = "other"
1138
if name_winner == "this" and parent_id_winner == "this":
1140
if name_winner == "conflict":
1141
trans_id = self.tt.trans_id_file_id(file_id)
1142
self._raw_conflicts.append(('name conflict', trans_id,
1143
this_name, other_name))
1144
if parent_id_winner == "conflict":
1145
trans_id = self.tt.trans_id_file_id(file_id)
1146
self._raw_conflicts.append(('parent conflict', trans_id,
1147
this_parent, other_parent))
1148
if other_name is None:
1149
# it doesn't matter whether the result was 'other' or
1150
# 'conflict'-- if there's no 'other', we leave it alone.
1152
# if we get here, name_winner and parent_winner are set to safe values.
1153
trans_id = self.tt.trans_id_file_id(file_id)
1154
parent_id = parents[self.winner_idx[parent_id_winner]]
1155
if parent_id is not None:
1156
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1157
self.tt.adjust_path(names[self.winner_idx[name_winner]],
1158
parent_trans_id, trans_id)
1160
def merge_contents(self, file_id):
1161
"""Performs a merge on file_id contents."""
1162
def contents_pair(tree):
1163
if file_id not in tree:
1165
kind = tree.kind(file_id)
1167
contents = tree.get_file_sha1(file_id)
1168
elif kind == "symlink":
1169
contents = tree.get_symlink_target(file_id)
1172
return kind, contents
1174
# See SPOT run. run, SPOT, run.
1175
# So we're not QUITE repeating ourselves; we do tricky things with
1177
base_pair = contents_pair(self.base_tree)
1178
other_pair = contents_pair(self.other_tree)
1180
this_pair = contents_pair(self.this_tree)
1181
lca_pairs = [contents_pair(tree) for tree in self._lca_trees]
1182
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1183
this_pair, allow_overriding_lca=False)
1185
if base_pair == other_pair:
1188
# We delayed evaluating this_pair as long as we can to avoid
1189
# unnecessary sha1 calculation
1190
this_pair = contents_pair(self.this_tree)
1191
winner = self._three_way(base_pair, other_pair, this_pair)
1192
if winner == 'this':
1193
# No interesting changes introduced by OTHER
1195
# We have a hypothetical conflict, but if we have files, then we
1196
# can try to merge the content
1197
trans_id = self.tt.trans_id_file_id(file_id)
1198
params = MergeHookParams(self, file_id, trans_id, this_pair,
1200
hooks = Merger.hooks['merge_file_content']
1201
hooks = list(hooks) + [self.default_text_merge]
1202
hook_status = 'not_applicable'
1204
hook_status, lines = hook(params)
1205
if hook_status != 'not_applicable':
1206
# Don't try any more hooks, this one applies.
1209
if hook_status == 'not_applicable':
1210
# This is a contents conflict, because none of the available
1211
# functions could merge it.
1213
name = self.tt.final_name(trans_id)
1214
parent_id = self.tt.final_parent(trans_id)
1215
if file_id in self.this_tree.inventory:
1216
self.tt.unversion_file(trans_id)
1217
file_group = self._dump_conflicts(name, parent_id, file_id,
1219
self._raw_conflicts.append(('contents conflict', file_group))
1220
elif hook_status == 'success':
1221
self.tt.create_file(lines, trans_id)
1222
elif hook_status == 'conflicted':
1223
# XXX: perhaps the hook should be able to provide
1224
# the BASE/THIS/OTHER files?
1225
self.tt.create_file(lines, trans_id)
1226
self._raw_conflicts.append(('text conflict', trans_id))
1227
name = self.tt.final_name(trans_id)
1228
parent_id = self.tt.final_parent(trans_id)
1229
file_group = self._dump_conflicts(name, parent_id, file_id)
1230
file_group.append(trans_id)
1231
elif hook_status == 'delete':
1232
self.tt.unversion_file(trans_id)
1234
elif hook_status == 'done':
1235
# The hook function did whatever it needs to do directly, no
1236
# further action needed here.
1239
raise AssertionError('unknown hook_status: %r' % (hook_status,))
1240
if file_id not in self.this_tree and result == "modified":
1241
self.tt.version_file(file_id, trans_id)
1243
self.tt.tree_kind(trans_id)
1244
self.tt.delete_contents(trans_id)
1245
except errors.NoSuchFile:
1249
def _default_other_winner_merge(self, merge_hook_params):
1250
"""Replace this contents with other."""
1251
file_id = merge_hook_params.file_id
1252
trans_id = merge_hook_params.trans_id
1253
file_in_this = file_id in self.this_tree
1254
if file_id in self.other_tree:
1255
# OTHER changed the file
1257
if wt.supports_content_filtering():
1258
# We get the path from the working tree if it exists.
1259
# That fails though when OTHER is adding a file, so
1260
# we fall back to the other tree to find the path if
1261
# it doesn't exist locally.
1263
filter_tree_path = wt.id2path(file_id)
1264
except errors.NoSuchId:
1265
filter_tree_path = self.other_tree.id2path(file_id)
1267
# Skip the id2path lookup for older formats
1268
filter_tree_path = None
1269
transform.create_from_tree(self.tt, trans_id,
1270
self.other_tree, file_id,
1271
filter_tree_path=filter_tree_path)
1274
# OTHER deleted the file
1275
return 'delete', None
1277
raise AssertionError(
1278
'winner is OTHER, but file_id %r not in THIS or OTHER tree'
1281
def default_text_merge(self, merge_hook_params):
1282
if merge_hook_params.winner == 'other':
1283
# OTHER is a straight winner, so replace this contents with other
1284
return self._default_other_winner_merge(merge_hook_params)
1285
elif merge_hook_params.is_file_merge():
1286
# THIS and OTHER are both files, so text merge. Either
1287
# BASE is a file, or both converted to files, so at least we
1288
# have agreement that output should be a file.
1290
self.text_merge(merge_hook_params.file_id,
1291
merge_hook_params.trans_id)
1292
except errors.BinaryFile:
1293
return 'not_applicable', None
1296
return 'not_applicable', None
1298
def get_lines(self, tree, file_id):
1299
"""Return the lines in a file, or an empty list."""
1301
return tree.get_file(file_id).readlines()
1305
def text_merge(self, file_id, trans_id):
1306
"""Perform a three-way text merge on a file_id"""
1307
# it's possible that we got here with base as a different type.
1308
# if so, we just want two-way text conflicts.
1309
if file_id in self.base_tree and \
1310
self.base_tree.kind(file_id) == "file":
1311
base_lines = self.get_lines(self.base_tree, file_id)
1314
other_lines = self.get_lines(self.other_tree, file_id)
1315
this_lines = self.get_lines(self.this_tree, file_id)
1316
m3 = merge3.Merge3(base_lines, this_lines, other_lines,
1317
is_cherrypick=self.cherrypick)
1318
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
1319
if self.show_base is True:
1320
base_marker = '|' * 7
1324
def iter_merge3(retval):
1325
retval["text_conflicts"] = False
1326
for line in m3.merge_lines(name_a = "TREE",
1327
name_b = "MERGE-SOURCE",
1328
name_base = "BASE-REVISION",
1329
start_marker=start_marker,
1330
base_marker=base_marker,
1331
reprocess=self.reprocess):
1332
if line.startswith(start_marker):
1333
retval["text_conflicts"] = True
1334
yield line.replace(start_marker, '<' * 7)
1338
merge3_iterator = iter_merge3(retval)
1339
self.tt.create_file(merge3_iterator, trans_id)
1340
if retval["text_conflicts"] is True:
1341
self._raw_conflicts.append(('text conflict', trans_id))
1342
name = self.tt.final_name(trans_id)
1343
parent_id = self.tt.final_parent(trans_id)
1344
file_group = self._dump_conflicts(name, parent_id, file_id,
1345
this_lines, base_lines,
1347
file_group.append(trans_id)
1349
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
1350
base_lines=None, other_lines=None, set_version=False,
1352
"""Emit conflict files.
1353
If this_lines, base_lines, or other_lines are omitted, they will be
1354
determined automatically. If set_version is true, the .OTHER, .THIS
1355
or .BASE (in that order) will be created as versioned files.
1357
data = [('OTHER', self.other_tree, other_lines),
1358
('THIS', self.this_tree, this_lines)]
1360
data.append(('BASE', self.base_tree, base_lines))
1362
# We need to use the actual path in the working tree of the file here,
1363
# ignoring the conflict suffixes
1365
if wt.supports_content_filtering():
1367
filter_tree_path = wt.id2path(file_id)
1368
except errors.NoSuchId:
1369
# file has been deleted
1370
filter_tree_path = None
1372
# Skip the id2path lookup for older formats
1373
filter_tree_path = None
1377
for suffix, tree, lines in data:
1379
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1380
suffix, lines, filter_tree_path)
1381
file_group.append(trans_id)
1382
if set_version and not versioned:
1383
self.tt.version_file(file_id, trans_id)
1387
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1388
lines=None, filter_tree_path=None):
1389
"""Emit a single conflict file."""
1390
name = name + '.' + suffix
1391
trans_id = self.tt.create_path(name, parent_id)
1392
transform.create_from_tree(self.tt, trans_id, tree, file_id, lines,
1396
def merge_executable(self, file_id, file_status):
1397
"""Perform a merge on the execute bit."""
1398
executable = [self.executable(t, file_id) for t in (self.base_tree,
1399
self.other_tree, self.this_tree)]
1400
self._merge_executable(file_id, executable, file_status,
1401
resolver=self._three_way)
1403
def _merge_executable(self, file_id, executable, file_status,
1405
"""Perform a merge on the execute bit."""
1406
base_executable, other_executable, this_executable = executable
1407
if file_status == "deleted":
1409
winner = resolver(*executable)
1410
if winner == "conflict":
1411
# There must be a None in here, if we have a conflict, but we
1412
# need executability since file status was not deleted.
1413
if self.executable(self.other_tree, file_id) is None:
1417
if winner == 'this' and file_status != "modified":
1419
trans_id = self.tt.trans_id_file_id(file_id)
1421
if self.tt.final_kind(trans_id) != "file":
1423
except errors.NoSuchFile:
1425
if winner == "this":
1426
executability = this_executable
1428
if file_id in self.other_tree:
1429
executability = other_executable
1430
elif file_id in self.this_tree:
1431
executability = this_executable
1432
elif file_id in self.base_tree:
1433
executability = base_executable
1434
if executability is not None:
1435
trans_id = self.tt.trans_id_file_id(file_id)
1436
self.tt.set_executability(executability, trans_id)
1438
def cook_conflicts(self, fs_conflicts):
1439
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1441
self.cooked_conflicts.extend(transform.cook_conflicts(
1442
fs_conflicts, self.tt))
1443
fp = transform.FinalPaths(self.tt)
1444
for conflict in self._raw_conflicts:
1445
conflict_type = conflict[0]
1446
if conflict_type in ('name conflict', 'parent conflict'):
1447
trans_id = conflict[1]
1448
conflict_args = conflict[2:]
1449
if trans_id not in name_conflicts:
1450
name_conflicts[trans_id] = {}
1451
transform.unique_add(name_conflicts[trans_id], conflict_type,
1453
if conflict_type == 'contents conflict':
1454
for trans_id in conflict[1]:
1455
file_id = self.tt.final_file_id(trans_id)
if file_id is not None:
215
interesting_ids.add(file_id)
218
raise BzrCommandError("%s is not a source file in any"
220
merge_inner(this_branch, other_tree, base_tree, tempdir,
221
ignore_zero=ignore_zero, backup_files=backup_files,
222
merge_type=merge_type, interesting_ids=interesting_ids)
224
shutil.rmtree(tempdir)
227
def set_interesting(inventory_a, inventory_b, interesting_ids):
228
"""Mark files whose ids are in interesting_ids as interesting
230
for inventory in (inventory_a, inventory_b):
231
for path, source_file in inventory.iteritems():
232
source_file.interesting = source_file.id in interesting_ids
235
def set_optimized(tree_a, tree_b, inventory_a, inventory_b):
236
"""Mark files that have changed texts as interesting
238
for file_id in tree_a.tree.inventory:
239
if file_id not in tree_b.tree.inventory:
241
entry_a = tree_a.tree.inventory[file_id]
242
entry_b = tree_b.tree.inventory[file_id]
243
if (entry_a.kind, entry_b.kind) != ("file", "file"):
245
if None in (entry_a.text_id, entry_b.text_id):
247
if entry_a.text_id != entry_b.text_id:
249
inventory_a[abspath(tree_a.tree, file_id)].interesting = False
250
inventory_b[abspath(tree_b.tree, file_id)].interesting = False
253
def generate_cset_optimized(tree_a, tree_b, inventory_a, inventory_b,
254
interesting_ids=None):
255
"""Generate a changeset, with preprocessing to select interesting files.
256
using the text_id to mark really-changed files.
257
This permits blazing comparisons when text_ids are present. It also
258
disables metadata comparison for files with identical texts.
260
if interesting_ids is None:
261
set_optimized(tree_a, tree_b, inventory_a, inventory_b)
263
set_interesting(inventory_a, inventory_b, interesting_ids)
264
cset = generate_changeset(tree_a, tree_b, inventory_a, inventory_b)
265
for entry in cset.entries.itervalues():
266
entry.metadata_change = None
270
def merge_inner(this_branch, other_tree, base_tree, tempdir,
271
ignore_zero=False, merge_type=ApplyMerge3, backup_files=False,
272
interesting_ids=None):
274
def merge_factory(base_file, other_file):
275
contents_change = merge_type(base_file, other_file)
277
contents_change = BackupBeforeChange(contents_change)
278
return contents_change
280
def generate_cset(tree_a, tree_b, inventory_a, inventory_b):
281
return generate_cset_optimized(tree_a, tree_b, inventory_a, inventory_b,
284
this_tree = get_tree((this_branch.base, None), tempdir, "this")[1]
286
def get_inventory(tree):
287
return tree.inventory
289
inv_changes = merge_flex(this_tree, base_tree, other_tree,
290
generate_cset, get_inventory,
291
MergeConflictHandler(base_tree.root,
292
ignore_zero=ignore_zero),
293
merge_factory=merge_factory)
296
for id, path in inv_changes.iteritems():
301
assert path.startswith('./')
303
adjust_ids.append((path, id))
304
this_branch.set_inventory(regen_inventory(this_branch, this_tree.root, adjust_ids))
307
def regen_inventory(this_branch, root, new_entries):
308
old_entries = this_branch.read_working_inventory()
311
for file_id in old_entries:
312
entry = old_entries[file_id]
313
path = old_entries.id2path(file_id)
314
new_inventory[file_id] = (path, file_id, entry.parent_id, entry.kind)
315
by_path[path] = file_id
320
for path, file_id in new_entries:
322
del new_inventory[file_id]
325
new_path_list.append((path, file_id))
326
if file_id not in old_entries:
328
# Ensure no file is added before its parent
330
for path, file_id in new_path_list:
334
parent = by_path[os.path.dirname(path)]
335
kind = bzrlib.osutils.file_kind(os.path.join(root, path))
336
new_inventory[file_id] = (path, file_id, parent, kind)
337
by_path[path] = file_id
339
# Get a list in insertion order
340
new_inventory_list = new_inventory.values()
341
mutter ("""Inventory regeneration:
342
old length: %i insertions: %i deletions: %i new_length: %i"""\
343
% (len(old_entries), insertions, deletions, len(new_inventory_list)))
344
assert len(new_inventory_list) == len(old_entries) + insertions - deletions
345
new_inventory_list.sort()
346
return new_inventory_list
348
merge_types = { "merge3": (ApplyMerge3, "Native diff3-style merge"),
349
"diff3": (Diff3Merge, "Merge using external diff3")
1458
                path = fp.get_path(trans_id)
                for suffix in ('.BASE', '.THIS', '.OTHER'):
                    if path.endswith(suffix):
                        path = path[:-len(suffix)]
                        break
                c = _mod_conflicts.Conflict.factory(conflict_type,
                                                    path=path, file_id=file_id)
                self.cooked_conflicts.append(c)
            if conflict_type == 'text conflict':
                trans_id = conflict[1]
                path = fp.get_path(trans_id)
                file_id = self.tt.final_file_id(trans_id)
                c = _mod_conflicts.Conflict.factory(conflict_type,
                                                    path=path, file_id=file_id)
                self.cooked_conflicts.append(c)

        for trans_id, conflicts in name_conflicts.iteritems():
            try:
                this_parent, other_parent = conflicts['parent conflict']
                if this_parent == other_parent:
                    raise AssertionError()
            except KeyError:
                this_parent = other_parent = \
                    self.tt.final_file_id(self.tt.final_parent(trans_id))
            try:
                this_name, other_name = conflicts['name conflict']
                if this_name == other_name:
                    raise AssertionError()
            except KeyError:
                this_name = other_name = self.tt.final_name(trans_id)
            other_path = fp.get_path(trans_id)
            if this_parent is not None and this_name is not None:
                this_parent_path = \
                    fp.get_path(self.tt.trans_id_file_id(this_parent))
                this_path = osutils.pathjoin(this_parent_path, this_name)
            else:
                this_path = "<deleted>"
            file_id = self.tt.final_file_id(trans_id)
            c = _mod_conflicts.Conflict.factory('path conflict', path=this_path,
                                                conflict_path=other_path,
                                                file_id=file_id)
            self.cooked_conflicts.append(c)
        self.cooked_conflicts.sort(key=_mod_conflicts.Conflict.sort_key)

class WeaveMerger(Merge3Merger):
    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False
    supports_reverse_cherrypick = False
    history_based = True

    def _generate_merge_plan(self, file_id, base):
        return self.this_tree.plan_file_merge(file_id, self.other_tree,
                                              base=base)

    def _merged_lines(self, file_id):
        """Generate the merged lines.

        There is no distinction between lines that are meant to contain
        <<<<<<< and conflict markers.
        """
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self._generate_merge_plan(file_id, base)
        if 'merge' in debug.debug_flags:
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%11s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = versionedfile.PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                                 '>>>>>>> MERGE-SOURCE\n')
        lines, conflicts = textmerge.merge_lines(self.reprocess)
        if conflicts:
            base_lines = textmerge.base_from_plan()
        else:
            base_lines = None
        return lines, base_lines

    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.

        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        """
        lines, base_lines = self._merged_lines(file_id)
        lines = list(lines)
        # Note we're checking whether the OUTPUT is binary in this case,
        # because we don't want to get into weave merge guts.
        textfile.check_text_lines(lines)
        self.tt.create_file(lines, trans_id)
        if base_lines is not None:
            # Conflict: emit .THIS/.OTHER (and .BASE) copies and record it.
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              no_base=False,
                                              base_lines=base_lines)
            file_group.append(trans_id)

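# Illustrative sketch of the data flowing through WeaveMerger._merged_lines
# above (the plan tuples are hypothetical, shown only to document the shape):
#     plan = [('unchanged', 'a\n'), ('killed-b', 'b\n'), ('new-a', 'c\n')]
#     textmerge = versionedfile.PlanWeaveMerge(plan, '<<<<<<< TREE\n',
#                                              '>>>>>>> MERGE-SOURCE\n')
#     lines, conflicts = textmerge.merge_lines(False)
# Conflicted regions come back bracketed by the TREE/MERGE-SOURCE markers
# given above; with the 'merge' debug flag set, a '<name>.plan' file records
# each ('%11s|%s' % (state, line)) pair for inspection.
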
class LCAMerger(WeaveMerger):

    def _generate_merge_plan(self, file_id, base):
        return self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
                                                  base=base)

class Diff3Merger(Merge3Merger):
    """Three-way merger using external diff3 for text merging"""

    def dump_file(self, temp_dir, name, tree, file_id):
        out_path = osutils.pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        try:
            in_file = tree.get_file(file_id)
            for line in in_file:
                out_file.write(line)
        finally:
            out_file.close()
        return out_path

    def text_merge(self, file_id, trans_id):
        """Perform a diff3 merge using a specified file-id and trans-id.

        If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
        import bzrlib.patch
        temp_dir = osutils.mkdtemp(prefix="bzr-")
        try:
            new_file = osutils.pathjoin(temp_dir, "new")
            this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
            base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
            other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
            status = bzrlib.patch.diff3(new_file, this, base, other)
            if status not in (0, 1):
                raise errors.BzrError("Unhandled diff3 exit code")
            f = open(new_file, 'rb')
            try:
                self.tt.create_file(f, trans_id)
            finally:
                f.close()
            if status == 1:
                name = self.tt.final_name(trans_id)
                parent_id = self.tt.final_parent(trans_id)
                self._dump_conflicts(name, parent_id, file_id)
                self._raw_conflicts.append(('text conflict', trans_id))
        finally:
            osutils.rmtree(temp_dir)

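# Illustrative note on the diff3 status handling above, following the usual
# diff3(1) convention: exit status 0 means a clean merge (the "new" file is
# used as-is), 1 means the merge completed but contains conflict hunks (so
# .BASE/.THIS/.OTHER copies are dumped and a text conflict is recorded), and
# anything else is treated as a hard error and raised as BzrError.
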
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                interesting_ids=None,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                pb=progress.DummyProgress(),
                change_reporter=None):
    """Primary interface for merging.

    Typical use is probably
    'merge_inner(branch, branch.get_revision_tree(other_revision),
                 branch.get_revision_tree(base_revision))'
    """
    if this_tree is None:
        raise errors.BzrError("bzrlib.merge.merge_inner requires a this_tree "
                              "parameter as of bzrlib version 0.8.")
    merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
                    pb=pb, change_reporter=change_reporter)
    merger.backup_files = backup_files
    merger.merge_type = merge_type
    merger.interesting_ids = interesting_ids
    merger.ignore_zero = ignore_zero
    if interesting_files:
        if interesting_ids:
            raise ValueError('Only supply interesting_ids'
                             ' or interesting_files')
        merger.interesting_files = interesting_files
    merger.show_base = show_base
    merger.reprocess = reprocess
    merger.other_rev_id = other_rev_id
    merger.other_basis = other_rev_id
    get_revision_id = getattr(base_tree, 'get_revision_id', None)
    if get_revision_id is None:
        get_revision_id = base_tree.last_revision
    merger.cache_trees_with_revision_ids([other_tree, base_tree, this_tree])
    merger.set_base_revision(get_revision_id(), this_branch)
    return merger.do_merge()

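# Illustrative usage sketch (hypothetical caller code; `wt` stands for an
# already-opened WorkingTree and the revision ids are placeholders):
#     repo = wt.branch.repository
#     merge_inner(wt.branch,
#                 repo.revision_tree(other_rev_id),
#                 repo.revision_tree(base_rev_id),
#                 this_tree=wt)
# Passing this_tree explicitly is required, as enforced at the top of
# merge_inner() above.
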
def get_merge_type_registry():
    """Merge type registry is in bzrlib.option to avoid circular imports.

    This method provides a sanctioned way to retrieve it.
    """
    from bzrlib import option
    return option._merge_type_registry

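# Illustrative usage sketch (assuming the merge-type names exposed on the
# command line, e.g. 'merge3', 'weave', 'diff3', 'lca'):
#     registry = get_merge_type_registry()
#     merger_class = registry.get('weave')   # a Merge3Merger subclass
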
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
        else:
            return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
        else:
            return 'new-b', text

    plain_a = [t for (a, t) in annotated_a]
    plain_b = [t for (a, t) in annotated_b]
    matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
    blocks = matcher.get_matching_blocks()
    a_cur = 0
    b_cur = 0
    for ai, bi, l in blocks:
        # process all mismatched sections
        # (last mismatched section is handled because blocks always
        # includes a 0-length last block)
        for revision, text in annotated_a[a_cur:ai]:
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a

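# Illustrative sketch of the (state, text) pairs yielded above, for a
# hypothetical line 'x\n' that appears only on the A side:
#     ('killed-b', 'x\n')   if its revision is already in ancestors_b
#     ('new-a', 'x\n')      otherwise
# Lines inside matching blocks are yielded as ('unchanged', text), taking the
# text from the A side.
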
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either
        the line was added to text A, or it was deleted from B.  Once the
        causes are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)

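    # Illustrative note on the plan format produced by plan_merge() above
    # (via _iter_plan() below): each element is a (state, line) pair, where
    # state is one of 'unchanged', 'new-a', 'new-b', 'killed-a', 'killed-b',
    # 'conflicted-a' or 'conflicted-b'.  A hypothetical plan might look like:
    #     [('unchanged', 'a\n'), ('new-a', 'b\n'), ('killed-b', 'c\n')]
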
    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i + n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique.

        :return: a tuple of (unique_left, unique_right), where the values are
            lists of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right

    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim.  All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j + n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n

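# Illustrative worked example for _PlanMergeBase._subtract_plans above, using
# hypothetical plans:
#     old_plan = [('killed-b', 'z\n'), ('new-b', 'x\n')]
#     new_plan = [('killed-b', 'z\n'), ('new-b', 'x\n'), ('new-b', 'y\n')]
#     list(_PlanMergeBase._subtract_plans(old_plan, new_plan))
#     => [('unchanged', 'z\n'), ('new-b', 'y\n')]
# The matched 'killed-b' entry is downgraded to 'unchanged', the matched
# 'new-b' entry is dropped, and the unmatched trailing entry is kept verbatim.
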
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = _mod_graph.Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # per-file graphs
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            trace.mutter('found dominating revision for %s\n%s > %s', self.vf,
                         self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([_mod_revision.NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                trace.mutter('More than 2 LCAs, falling back to all nodes for:'
                             ' %s, %s\n=> %s',
                             self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == _mod_revision.NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside
            of the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if _mod_revision.NULL_REVISION in parent_map:
                parent_map.pop(_mod_revision.NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails

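    # Illustrative worked example for _remove_external_references above,
    # using a hypothetical two-node parent_map whose first node references a
    # key outside the map:
    #     parent_map = {('A',): (('X',),), ('B',): (('A',),)}
    #     => filtered_parent_map == {('A',): [], ('B',): [('A',)]}
    #        child_map == {('A',): [('B',)], ('B',): []}
    #        tails == [('A',)]
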
    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)

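    # Illustrative worked example for _prune_tails above, with hypothetical
    # dicts (note that ('C',) survives because it still has parent ('B',)):
    #     parent_map = {('A',): [], ('B',): [], ('C',): [('A',), ('B',)]}
    #     child_map = {('A',): [('C',)], ('B',): [('C',)], ('C',): []}
    #     _PlanMerge._prune_tails(parent_map, child_map, [('A',)])
    #     # parent_map is now {('B',): [], ('C',): [('B',)]}
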
    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
        # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either
        the line was added to text A, or it was deleted from B.  Once the
        causes are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)

class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == _mod_revision.NULL_REVISION:
                self.lcas.add(None)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determine the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))