# Copyright (C) 2005, 2006 Canonical Development Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

import os
from cStringIO import StringIO
import errno
from tempfile import TemporaryFile

from bzrlib.diff import internal_diff, external_diff, show_diff_trees
from bzrlib.errors import BinaryFile, NoDiff
import bzrlib.patiencediff
from bzrlib.tests import (TestCase, TestCaseWithTransport,
                          TestCaseInTempDir, TestSkipped)

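# Helper functions used throughout this module: udiff_lines() runs bzrlib's
# internal_diff over two lists of lines, while external_udiff_lines() shells
# out to the system 'diff' program and skips the test if it is not installed.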
def udiff_lines(old, new, allow_binary=False):
    output = StringIO()
    internal_diff('old', old, 'new', new, output, allow_binary)
    output.seek(0, 0)
    return output.readlines()


def external_udiff_lines(old, new, use_stringio=False):
    if use_stringio:
        # StringIO has no fileno, so it tests a different codepath
        output = StringIO()
    else:
        output = TemporaryFile()
    try:
        external_diff('old', old, 'new', new, output, diff_opts=['-u'])
    except NoDiff:
        raise TestSkipped('external "diff" not present to test')
    output.seek(0, 0)
    lines = output.readlines()
    output.close()
    return lines

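# Basic sanity checks of the unified diff output produced by internal_diff
# and external_diff.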
class TestDiff(TestCase):

    def test_add_nl(self):
        """diff generates a valid diff for patches that add a newline"""
        lines = udiff_lines(['boo'], ['boo\n'])
        self.check_patch(lines)
        self.assertEquals(lines[4], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[4]

    def test_add_nl_2(self):
        """diff generates a valid diff for patches that change last line and
        add a newline.
        """
        lines = udiff_lines(['boo'], ['goo\n'])
        self.check_patch(lines)
        self.assertEquals(lines[4], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[4]

    def test_remove_nl(self):
        """diff generates a valid diff for patches that change last line and
        remove a newline.
        """
        lines = udiff_lines(['boo\n'], ['boo'])
        self.check_patch(lines)
        self.assertEquals(lines[5], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[5]

    def check_patch(self, lines):
        self.assert_(len(lines) > 1)
            ## "Not enough lines for a file header for patch:\n%s" % "".join(lines)
        self.assert_(lines[0].startswith ('---'))
            ## 'No orig line for patch:\n%s' % "".join(lines)
        self.assert_(lines[1].startswith ('+++'))
            ## 'No mod line for patch:\n%s' % "".join(lines)
        self.assert_(len(lines) > 2)
            ## "No hunks for patch:\n%s" % "".join(lines)
        self.assert_(lines[2].startswith('@@'))
            ## "No hunk header for patch:\n%s" % "".join(lines)
        self.assert_('@@' in lines[2][2:])
            ## "Unterminated hunk header for patch:\n%s" % "".join(lines)

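    # internal_diff treats input containing a NUL byte as binary: it raises
    # BinaryFile unless allow_binary=True is passed, as the cases below show.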
    def test_binary_lines(self):
        self.assertRaises(BinaryFile, udiff_lines, [1023 * 'a' + '\x00'], [])
        self.assertRaises(BinaryFile, udiff_lines, [], [1023 * 'a' + '\x00'])
        udiff_lines([1023 * 'a' + '\x00'], [], allow_binary=True)
        udiff_lines([], [1023 * 'a' + '\x00'], allow_binary=True)

    def test_external_diff(self):
        lines = external_udiff_lines(['boo\n'], ['goo\n'])
        self.check_patch(lines)

    def test_external_diff_no_fileno(self):
        # Make sure that we can handle not having a fileno, even
        # if the diff is large
        lines = external_udiff_lines(['boo\n']*10000,
                                     ['goo\n']*10000,
                                     use_stringio=True)
        self.check_patch(lines)

    def test_internal_diff_default(self):
        # Default internal diff encoding is utf8
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_utf8(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='utf8')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_iso_8859_1(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='iso-8859-1')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xb5\n',
                           '+++ new_\xe5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_returns_bytes(self):
        import StringIO
        output = StringIO.StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        self.failUnless(isinstance(output.getvalue(), str),
            'internal_diff should return bytestrings')

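# TestDiffDates commits a small tree with fixed timestamps so the dates that
# show_diff_trees prints in the '---'/'+++' headers can be asserted exactly.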
class TestDiffDates(TestCaseWithTransport):

    def setUp(self):
        super(TestDiffDates, self).setUp()
        self.wt = self.make_branch_and_tree('.')
        self.b = self.wt.branch
        self.build_tree_contents([
            ('file1', 'file1 contents at rev 1\n'),
            ('file2', 'file2 contents at rev 1\n')
            ])
        self.wt.add(['file1', 'file2'])
        self.wt.commit(
            message='Revision 1',
            timestamp=1143849600, # 2006-04-01 00:00:00 UTC
            timezone=0,
            rev_id='rev-1')
        self.build_tree_contents([('file1', 'file1 contents at rev 2\n')])
        self.wt.commit(
            message='Revision 2',
            timestamp=1143936000, # 2006-04-02 00:00:00 UTC
            timezone=28800,
            rev_id='rev-2')
        self.build_tree_contents([('file2', 'file2 contents at rev 3\n')])
        self.wt.commit(
            message='Revision 3',
            timestamp=1144022400, # 2006-04-03 00:00:00 UTC
            timezone=-3600,
            rev_id='rev-3')
        self.wt.remove(['file2'])
        self.wt.commit(
            message='Revision 4',
            timestamp=1144108800, # 2006-04-04 00:00:00 UTC
            timezone=0,
            rev_id='rev-4')
        self.build_tree_contents([
            ('file1', 'file1 contents in working tree\n')
            ])
        # set the date stamps for files in the working tree to known values
        os.utime('file1', (1144195200, 1144195200)) # 2006-04-05 00:00:00 UTC

    def get_diff(self, tree1, tree2):
        output = StringIO()
        show_diff_trees(tree1, tree2, output,
                        old_label='old/', new_label='new/')
        return output.getvalue()

    def test_diff_rev_tree_working_tree(self):
        output = self.get_diff(self.wt.basis_tree(), self.wt)
        # note that the date for old/file1 is from rev 2 rather than from
        # the basis revision (rev 4)
        self.assertEqualDiff(output, '''\
=== modified file 'file1'
--- old/file1\t2006-04-02 00:00:00 +0000
+++ new/file1\t2006-04-05 00:00:00 +0000
@@ -1,1 +1,1 @@
-file1 contents at rev 2
+file1 contents in working tree

''')

    def test_diff_rev_tree_rev_tree(self):
        tree1 = self.b.repository.revision_tree('rev-2')
        tree2 = self.b.repository.revision_tree('rev-3')
        output = self.get_diff(tree1, tree2)
        self.assertEqualDiff(output, '''\
=== modified file 'file2'
--- old/file2\t2006-04-01 00:00:00 +0000
+++ new/file2\t2006-04-03 00:00:00 +0000
@@ -1,1 +1,1 @@
-file2 contents at rev 1
+file2 contents at rev 3

''')

    def test_diff_add_files(self):
        tree1 = self.b.repository.revision_tree(None)
        tree2 = self.b.repository.revision_tree('rev-1')
        output = self.get_diff(tree1, tree2)
        # the files have the epoch time stamp for the tree in which
        # they don't exist.
        self.assertEqualDiff(output, '''\
=== added file 'file1'
--- old/file1\t1970-01-01 00:00:00 +0000
+++ new/file1\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file1 contents at rev 1

=== added file 'file2'
--- old/file2\t1970-01-01 00:00:00 +0000
+++ new/file2\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file2 contents at rev 1

''')

    def test_diff_remove_files(self):
        tree1 = self.b.repository.revision_tree('rev-3')
        tree2 = self.b.repository.revision_tree('rev-4')
        output = self.get_diff(tree1, tree2)
        # the file has the epoch time stamp for the tree in which
        # it doesn't exist.
        self.assertEqualDiff(output, '''\
=== removed file 'file2'
--- old/file2\t2006-04-03 00:00:00 +0000
+++ new/file2\t1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
-file2 contents at rev 3

''')

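# Tests for the patience diff implementation in bzrlib.patiencediff:
# unique_lcs, recurse_matches, PatienceSequenceMatcher and the unified_diff
# wrappers built on top of it.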
class TestPatienceDiffLib(TestCase):

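    # unique_lcs(a, b) returns (index_in_a, index_in_b) pairs for the longest
    # common subsequence restricted to elements that occur exactly once in
    # each sequence, as the expectations below illustrate.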
    def test_unique_lcs(self):
        unique_lcs = bzrlib.patiencediff.unique_lcs
        self.assertEquals(unique_lcs('', ''), [])
        self.assertEquals(unique_lcs('a', 'a'), [(0,0)])
        self.assertEquals(unique_lcs('a', 'b'), [])
        self.assertEquals(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
        self.assertEquals(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
        self.assertEquals(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
        self.assertEquals(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
                                                         (3,3), (4,4)])
        self.assertEquals(unique_lcs('acbac', 'abc'), [(2,1)])

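    # recurse_matches is called with the two sequences, the low/high bounds
    # to consider, an output list that it appends (a_index, b_index) pairs
    # to, and a maximum recursion depth (10 here).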
    def test_recurse_matches(self):
        def test_one(a, b, matches):
            test_matches = []
            bzrlib.patiencediff.recurse_matches(a, b, 0, 0, len(a), len(b),
                                                test_matches, 10)
            self.assertEquals(test_matches, matches)

        test_one(['a', '', 'b', '', 'c'], ['a', 'a', 'b', 'c', 'c'],
                 [(0, 0), (2, 2), (4, 4)])
        test_one(['a', 'c', 'b', 'a', 'c'], ['a', 'b', 'c'],
                 [(0, 0), (2, 1), (4, 2)])

        # recurse_matches doesn't match non-unique
        # lines surrounded by bogus text.
        # The update has been done in patiencediff.SequenceMatcher instead

        # This is what it could be
        #test_one('aBccDe', 'abccde', [(0,0), (2,2), (3,3), (5,5)])

        # This is what it currently gives:
        test_one('aBccDe', 'abccde', [(0,0), (5,5)])

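    # get_matching_blocks() follows the difflib.SequenceMatcher interface:
    # (i, j, n) triples meaning a[i:i+n] == b[j:j+n], terminated by a
    # (len(a), len(b), 0) sentinel that chk_blocks strips off before comparing.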
    def test_matching_blocks(self):
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            self.assertEquals((len(a), len(b), 0), blocks[-1])
            self.assertEquals(expected_blocks, blocks[:-1])

        # Some basic matching tests
        chk_blocks('', '', [])
        chk_blocks([], [], [])
        chk_blocks('abcd', 'abcd', [(0, 0, 4)])
        chk_blocks('abcd', 'abce', [(0, 0, 3)])
        chk_blocks('eabc', 'abce', [(1, 0, 3)])
        chk_blocks('eabce', 'abce', [(1, 0, 4)])
        chk_blocks('abcde', 'abXde', [(0, 0, 2), (3, 3, 2)])
        chk_blocks('abcde', 'abXYZde', [(0, 0, 2), (3, 5, 2)])
        chk_blocks('abde', 'abXYZde', [(0, 0, 2), (2, 5, 2)])
        # This may check too much, but it checks to see that
        # a copied block stays attached to the previous section,
        # not the later one.
        # difflib would tend to grab the trailing longest match
        # which would make the diff not look right
        chk_blocks('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                   [(0, 0, 6), (6, 11, 10)])

        # make sure it supports passing in lists
        chk_blocks(
                   ['hello there\n',
                    'world\n',
                    'how are you today?\n'],
                   ['hello there\n',
                    'how are you today?\n'],
                   [(0, 0, 1), (2, 1, 1)])

        # non unique lines surrounded by non-matching lines
        # won't be found
        chk_blocks('aBccDe', 'abccde', [(0,0,1), (5,5,1)])

        # But they only need to be locally unique
        chk_blocks('aBcDec', 'abcdec', [(0,0,1), (2,2,1), (4,4,2)])

        # non unique blocks won't be matched
        chk_blocks('aBcdEcdFg', 'abcdecdfg', [(0,0,1), (8,8,1)])

        # but locally unique ones will
        chk_blocks('aBcdEeXcdFg', 'abcdecdfg', [(0,0,1), (2,2,2),
                                                (5,4,1), (7,5,2), (10,8,1)])

        chk_blocks('abbabbXd', 'cabbabxd', [(7,7,1)])
        chk_blocks('abbabbbb', 'cabbabbc', [])
        chk_blocks('bbbbbbbb', 'cbbbbbbc', [])

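    # get_opcodes() also mirrors difflib.SequenceMatcher, returning
    # (tag, i1, i2, j1, j2) tuples where tag is 'equal', 'replace', 'insert'
    # or 'delete' and a[i1:i2] is mapped to b[j1:j2].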
    def test_opcodes(self):
        def chk_ops(a, b, expected_codes):
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            self.assertEquals(expected_codes, s.get_opcodes())

        chk_ops('', '', [])
        chk_ops([], [], [])
        chk_ops('abcd', 'abcd', [('equal', 0,4, 0,4)])
        chk_ops('abcd', 'abce', [('equal', 0,3, 0,3),
                                 ('replace', 3,4, 3,4)
                                ])
        chk_ops('eabc', 'abce', [('delete', 0,1, 0,0),
                                 ('equal', 1,4, 0,3),
                                 ('insert', 4,4, 3,4)
                                ])
        chk_ops('eabce', 'abce', [('delete', 0,1, 0,0),
                                  ('equal', 1,5, 0,4)
                                 ])
        chk_ops('abcde', 'abXde', [('equal', 0,2, 0,2),
                                   ('replace', 2,3, 2,3),
                                   ('equal', 3,5, 3,5)
                                  ])
        chk_ops('abcde', 'abXYZde', [('equal', 0,2, 0,2),
                                     ('replace', 2,3, 2,5),
                                     ('equal', 3,5, 5,7)
                                    ])
        chk_ops('abde', 'abXYZde', [('equal', 0,2, 0,2),
                                    ('insert', 2,2, 2,5),
                                    ('equal', 2,4, 5,7)
                                   ])
        chk_ops('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                [('equal', 0,6, 0,6),
                 ('insert', 6,6, 6,11),
                 ('equal', 6,16, 11,21)
                ])
        chk_ops(
                [ 'hello there\n'
                , 'world\n'
                , 'how are you today?\n'],
                [ 'hello there\n'
                , 'how are you today?\n'],
                [('equal', 0,1, 0,1),
                 ('delete', 1,2, 1,1),
                 ('equal', 2,3, 1,2),
                ])
        chk_ops('aBccDe', 'abccde',
                [('equal', 0,1, 0,1),
                 ('replace', 1,5, 1,5),
                 ('equal', 5,6, 5,6),
                ])
        chk_ops('aBcDec', 'abcdec',
                [('equal', 0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal', 2,3, 2,3),
                 ('replace', 3,4, 3,4),
                 ('equal', 4,6, 4,6),
                ])
        chk_ops('aBcdEcdFg', 'abcdecdfg',
                [('equal', 0,1, 0,1),
                 ('replace', 1,8, 1,8),
                 ('equal', 8,9, 8,9)
                ])
        chk_ops('aBcdEeXcdFg', 'abcdecdfg',
                [('equal', 0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal', 2,4, 2,4),
                 ('delete', 4,5, 4,4),
                 ('equal', 5,6, 4,5),
                 ('delete', 6,7, 5,5),
                 ('equal', 7,9, 5,7),
                 ('replace', 9,10, 7,8),
                 ('equal', 10,11, 8,9)
                ])

    def test_multiple_ranges(self):
        # There was an earlier bug where we used a bad set of ranges,
        # this triggers that specific bug, to make sure it doesn't regress
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            x = blocks.pop()
            self.assertEquals(x, (len(a), len(b), 0))
            self.assertEquals(expected_blocks, blocks)

        chk_blocks('abcdefghijklmnop'
                   , 'abcXghiYZQRSTUVWXYZijklmnop'
                   , [(0, 0, 3), (6, 4, 3), (9, 20, 7)])

        chk_blocks('ABCd efghIjk  L'
                   , 'AxyzBCn mo pqrstuvwI1 2  L'
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

        # These are rot13 code snippets.
        chk_blocks('''\
    trg nqqrq jura lbh nqq n svyr va gur qverpgbel.
    """
    gnxrf_netf = ['svyr*']
    gnxrf_bcgvbaf = ['ab-erphefr']
  
    qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr):
        sebz omeyvo.nqq vzcbeg fzneg_nqq, nqq_ercbegre_cevag, nqq_ercbegre_ahyy
        vs vf_dhvrg():
            ercbegre = nqq_ercbegre_ahyy
        ryfr:
            ercbegre = nqq_ercbegre_cevag
        fzneg_nqq(svyr_yvfg, abg ab_erphefr, ercbegre)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True), '''\
    trg nqqrq jura lbh nqq n svyr va gur qverpgbel.

    --qel-eha jvyy fubj juvpu svyrf jbhyq or nqqrq, ohg abg npghnyyl
    nqq gurz.
    """
    gnxrf_netf = ['svyr*']
    gnxrf_bcgvbaf = ['ab-erphefr', 'qel-eha']

    qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr, qel_eha=Snyfr):
        vzcbeg omeyvo.nqq

        vs qel_eha:
            vs vf_dhvrg():
                # Guvf vf cbvagyrff, ohg V'q engure abg envfr na reebe
                npgvba = omeyvo.nqq.nqq_npgvba_ahyy
            ryfr:
                npgvba = omeyvo.nqq.nqq_npgvba_cevag
        ryvs vf_dhvrg():
            npgvba = omeyvo.nqq.nqq_npgvba_nqq
        ryfr:
            npgvba = omeyvo.nqq.nqq_npgvba_nqq_naq_cevag

        omeyvo.nqq.fzneg_nqq(svyr_yvfg, abg ab_erphefr, npgvba)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True)
            , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

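    # unified_diff accepts a sequencematcher argument, so the same input is
    # diffed below with the default LongestCommonSubstring-style matcher and
    # with PatienceSequenceMatcher to show how the hunks differ.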
    def test_patience_unified_diff(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        unified_diff = bzrlib.patiencediff.unified_diff
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals([ '--- \n',
                            '+++ \n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
                            '-world\n',
                            ' how are you today?\n'
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))
        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           '+x\n',
                           '+y\n',
                           ' d\n',
                           ' e\n',
                           ' f\n']
                          , list(unified_diff(txt_a, txt_b)))
        # And the patience diff
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))

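# The same comparisons again, but through unified_diff_files, which reads its
# input from named files on disk (hence TestCaseInTempDir).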
class TestPatienceDiffLibFiles(TestCaseInTempDir):

    def test_patience_unified_diff_files(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        open('a1', 'wb').writelines(txt_a)
        open('b1', 'wb').writelines(txt_b)

        unified_diff_files = bzrlib.patiencediff.unified_diff_files
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals(['--- a1 \n',
                           '+++ b1 \n',
                           '@@ -1,3 +1,2 @@\n',
                           ' hello there\n',
                           '-world\n',
                           ' how are you today?\n',
                          ]
                          , list(unified_diff_files('a1', 'b1',
                                 sequencematcher=psm)))

        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        open('a2', 'wb').writelines(txt_a)
        open('b2', 'wb').writelines(txt_b)

        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           '+x\n',
                           '+y\n',
                           ' d\n',
                           ' e\n',
                           ' f\n']
                          , list(unified_diff_files('a2', 'b2')))

        # And the patience diff
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff_files('a2', 'b2',
                                 sequencematcher=psm)))