bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

import os
from cStringIO import StringIO
import errno
import subprocess
from tempfile import TemporaryFile

from bzrlib.diff import internal_diff, external_diff, show_diff_trees
from bzrlib.errors import BinaryFile, NoDiff
import bzrlib.patiencediff
from bzrlib.tests import (TestCase, TestCaseWithTransport,
                          TestCaseInTempDir, TestSkipped)

def udiff_lines(old, new, allow_binary=False):
    output = StringIO()
    internal_diff('old', old, 'new', new, output, allow_binary)
    output.seek(0, 0)
    return output.readlines()

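# Editor's illustration (not part of the original module): with the helper
# above, udiff_lines(['boo\n'], ['goo\n']) produces a list shaped like
#   ['--- old\n', '+++ new\n', '@@ -1,1 +1,1 @@\n', '-boo\n', '+goo\n', ...]
# The exact hunk text here is an assumption; the tests below only rely on the
# '---'/'+++' headers, the '@@' hunk marker, and the "No newline" marker.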

def external_udiff_lines(old, new, use_stringio=False):
    if use_stringio:
        # StringIO has no fileno, so it tests a different codepath
        output = StringIO()
    else:
        output = TemporaryFile()
    try:
        external_diff('old', old, 'new', new, output, diff_opts=['-u'])
    except NoDiff:
        raise TestSkipped('external "diff" not present to test')
    output.seek(0, 0)
    lines = output.readlines()
    output.close()
    return lines
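
# Editor's note (not in the original source): TemporaryFile() hands
# external_diff a real object with a fileno() that can be wired straight to
# the child 'diff' process, while use_stringio=True forces the fallback path
# that copies the child's output into a fileno-less buffer, so both code
# paths get exercised by the tests below.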


class TestDiff(TestCase):

    def test_add_nl(self):
        """diff generates a valid diff for patches that add a newline"""
        lines = udiff_lines(['boo'], ['boo\n'])
        self.check_patch(lines)
        self.assertEquals(lines[4], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[4]

    def test_add_nl_2(self):
        """diff generates a valid diff for patches that change the last line
        and add a newline.
        """
        lines = udiff_lines(['boo'], ['goo\n'])
        self.check_patch(lines)
        self.assertEquals(lines[4], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[4]

    def test_remove_nl(self):
        """diff generates a valid diff for patches that remove the newline
        from the last line.
        """
        lines = udiff_lines(['boo\n'], ['boo'])
        self.check_patch(lines)
        self.assertEquals(lines[5], '\\ No newline at end of file\n')
            ## "expected no-nl, got %r" % lines[5]

    def check_patch(self, lines):
        self.assert_(len(lines) > 1)
            ## "Not enough lines for a file header for patch:\n%s" % "".join(lines)
        self.assert_(lines[0].startswith('---'))
            ## 'No orig line for patch:\n%s' % "".join(lines)
        self.assert_(lines[1].startswith('+++'))
            ## 'No mod line for patch:\n%s' % "".join(lines)
        self.assert_(len(lines) > 2)
            ## "No hunks for patch:\n%s" % "".join(lines)
        self.assert_(lines[2].startswith('@@'))
            ## "No hunk header for patch:\n%s" % "".join(lines)
        self.assert_('@@' in lines[2][2:])
            ## "Unterminated hunk header for patch:\n%s" % "".join(lines)
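
        # Editor's illustration (not in the original file): the smallest
        # patch that check_patch() accepts looks like
        #   --- old
        #   +++ new
        #   @@ -1,1 +1,1 @@
        # i.e. two file headers plus a hunk whose header contains a closing
        # '@@'.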

    def test_binary_lines(self):
        self.assertRaises(BinaryFile, udiff_lines, [1023 * 'a' + '\x00'], [])
        self.assertRaises(BinaryFile, udiff_lines, [], [1023 * 'a' + '\x00'])
        udiff_lines([1023 * 'a' + '\x00'], [], allow_binary=True)
        udiff_lines([], [1023 * 'a' + '\x00'], allow_binary=True)

    def test_external_diff(self):
        lines = external_udiff_lines(['boo\n'], ['goo\n'])
        self.check_patch(lines)
        self.assertEqual('\n', lines[-1])

    def test_external_diff_no_fileno(self):
        # Make sure that we can handle not having a fileno, even
        # if the diff is large
        lines = external_udiff_lines(['boo\n']*10000,
                                     ['goo\n']*10000,
                                     use_stringio=True)
        self.check_patch(lines)

    def test_external_diff_binary_lang_c(self):
        orig_lang = os.environ.get('LANG')
        try:
            os.environ['LANG'] = 'C'
            lines = external_udiff_lines(['\x00foobar\n'], ['foo\x00bar\n'])
            # Older versions of diffutils say "Binary files", newer
            # versions just say "Files".
            self.assertContainsRe(lines[0],
                                  '(Binary f|F)iles old and new differ\n')
            self.assertEquals(lines[1:], ['\n'])
        finally:
            if orig_lang is None:
                del os.environ['LANG']
            else:
                os.environ['LANG'] = orig_lang
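
        # Editor's note (not in the original file): pinning LANG to 'C' above
        # keeps GNU diff's "Binary files ... differ" message in English so the
        # regexp stays predictable; this guards the fix for bug #56307, where
        # binary files broke the diff under non-English locales.
        # TestDiffFiles.test_external_diff_binary below checks the unmodified
        # locale instead.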

    def test_no_external_diff(self):
        """Check that NoDiff is raised when diff is not available"""
        # Use os.environ['PATH'] to make sure no 'diff' command is available
        orig_path = os.environ['PATH']
        try:
            os.environ['PATH'] = ''
            self.assertRaises(NoDiff, external_diff,
                              'old', ['boo\n'], 'new', ['goo\n'],
                              StringIO(), diff_opts=['-u'])
        finally:
            os.environ['PATH'] = orig_path

    def test_internal_diff_default(self):
        # Default internal diff encoding is utf8
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_utf8(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='utf8')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xc2\xb5\n',
                           '+++ new_\xc3\xa5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_iso_8859_1(self):
        output = StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output,
                      path_encoding='iso-8859-1')
        lines = output.getvalue().splitlines(True)
        self.check_patch(lines)
        self.assertEquals(['--- old_\xb5\n',
                           '+++ new_\xe5\n',
                           '@@ -1,1 +1,1 @@\n',
                           '-old_text\n',
                           '+new_text\n',
                           '\n',
                          ]
                          , lines)

    def test_internal_diff_returns_bytes(self):
        import StringIO
        output = StringIO.StringIO()
        internal_diff(u'old_\xb5', ['old_text\n'],
                      u'new_\xe5', ['new_text\n'], output)
        self.failUnless(isinstance(output.getvalue(), str),
            'internal_diff should return bytestrings')
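
        # Worked example for the expectations above (editor's addition): the
        # micro sign u'\xb5' encodes to the two bytes '\xc2\xb5' in UTF-8 but
        # stays the single byte '\xb5' in ISO-8859-1, and u'\xe5' likewise
        # becomes '\xc3\xa5' versus '\xe5' -- exactly the difference between
        # the default/utf8 and iso-8859-1 assertions.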


class TestDiffFiles(TestCaseInTempDir):

    def test_external_diff_binary(self):
        """The output when using external diff should use diff's i18n error"""
        # Make sure external_diff doesn't fail in the current LANG
        lines = external_udiff_lines(['\x00foobar\n'], ['foo\x00bar\n'])

        cmd = ['diff', '-u', 'old', 'new']
        open('old', 'wb').write('\x00foobar\n')
        open('new', 'wb').write('foo\x00bar\n')
        pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stdin=subprocess.PIPE)
        out, err = pipe.communicate()
        # Diff returns '2' on Binary files.
        self.assertEqual(2, pipe.returncode)
        # We should output whatever diff tells us, plus a trailing newline
        self.assertEqual(out.splitlines(True) + ['\n'], lines)
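
        # Editor's note (not in the original file): GNU diff's documented
        # exit statuses are 0 (inputs identical), 1 (differences found) and
        # 2 (trouble); the assertion above pins the binary-input case to 2,
        # matching the diffutils releases this test was written against.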


class TestDiffDates(TestCaseWithTransport):

    def setUp(self):
        super(TestDiffDates, self).setUp()
        self.wt = self.make_branch_and_tree('.')
        self.b = self.wt.branch
        self.build_tree_contents([
            ('file1', 'file1 contents at rev 1\n'),
            ('file2', 'file2 contents at rev 1\n')
            ])
        self.wt.add(['file1', 'file2'])
        self.wt.commit(
            message='Revision 1',
            timestamp=1143849600, # 2006-04-01 00:00:00 UTC
            timezone=0,
            rev_id='rev-1')
        self.build_tree_contents([('file1', 'file1 contents at rev 2\n')])
        self.wt.commit(
            message='Revision 2',
            timestamp=1143936000, # 2006-04-02 00:00:00 UTC
            timezone=28800,
            rev_id='rev-2')
        self.build_tree_contents([('file2', 'file2 contents at rev 3\n')])
        self.wt.commit(
            message='Revision 3',
            timestamp=1144022400, # 2006-04-03 00:00:00 UTC
            timezone=-3600,
            rev_id='rev-3')
        self.wt.remove(['file2'])
        self.wt.commit(
            message='Revision 4',
            timestamp=1144108800, # 2006-04-04 00:00:00 UTC
            timezone=0,
            rev_id='rev-4')
        self.build_tree_contents([
            ('file1', 'file1 contents in working tree\n')
            ])
        # set the date stamps for files in the working tree to known values
        os.utime('file1', (1144195200, 1144195200)) # 2006-04-05 00:00:00 UTC
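
        # Editor's illustration (not in the original file): the four commit
        # timestamps are exactly one day (86400 seconds) apart, starting at
        # 1143849600 == 2006-04-01 00:00:00 UTC, and os.utime() above stamps
        # the working-tree file1 with 2006-04-05.  The expected diff headers
        # in the tests below render all of these as UTC ('+0000'), even for
        # the commits made with non-zero timezone offsets.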

    def get_diff(self, tree1, tree2, specific_files=None, working_tree=None):
        output = StringIO()
        if working_tree is not None:
            extra_trees = (working_tree,)
        else:
            extra_trees = ()
        show_diff_trees(tree1, tree2, output, specific_files=specific_files,
                        extra_trees=extra_trees, old_label='old/',
                        new_label='new/')
        return output.getvalue()

    def test_diff_rev_tree_working_tree(self):
        output = self.get_diff(self.wt.basis_tree(), self.wt)
        # note that the date for old/file1 is from rev 2 rather than from
        # the basis revision (rev 4)
        self.assertEqualDiff(output, '''\
=== modified file 'file1'
--- old/file1\t2006-04-02 00:00:00 +0000
+++ new/file1\t2006-04-05 00:00:00 +0000
@@ -1,1 +1,1 @@
-file1 contents at rev 2
+file1 contents in working tree

''')

    def test_diff_rev_tree_rev_tree(self):
        tree1 = self.b.repository.revision_tree('rev-2')
        tree2 = self.b.repository.revision_tree('rev-3')
        output = self.get_diff(tree1, tree2)
        self.assertEqualDiff(output, '''\
=== modified file 'file2'
--- old/file2\t2006-04-01 00:00:00 +0000
+++ new/file2\t2006-04-03 00:00:00 +0000
@@ -1,1 +1,1 @@
-file2 contents at rev 1
+file2 contents at rev 3

''')

    def test_diff_add_files(self):
        tree1 = self.b.repository.revision_tree(None)
        tree2 = self.b.repository.revision_tree('rev-1')
        output = self.get_diff(tree1, tree2)
        # the files have the epoch time stamp for the tree in which
        # they don't exist.
        self.assertEqualDiff(output, '''\
=== added file 'file1'
--- old/file1\t1970-01-01 00:00:00 +0000
+++ new/file1\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file1 contents at rev 1

=== added file 'file2'
--- old/file2\t1970-01-01 00:00:00 +0000
+++ new/file2\t2006-04-01 00:00:00 +0000
@@ -0,0 +1,1 @@
+file2 contents at rev 1

''')

    def test_diff_remove_files(self):
        tree1 = self.b.repository.revision_tree('rev-3')
        tree2 = self.b.repository.revision_tree('rev-4')
        output = self.get_diff(tree1, tree2)
        # the file has the epoch time stamp for the tree in which
        # it doesn't exist.
        self.assertEqualDiff(output, '''\
=== removed file 'file2'
--- old/file2\t2006-04-03 00:00:00 +0000
+++ new/file2\t1970-01-01 00:00:00 +0000
@@ -1,1 +0,0 @@
-file2 contents at rev 3

''')

    def test_show_diff_specified(self):
        """A working tree filename can be used to identify a file"""
        self.wt.rename_one('file1', 'file1b')
        old_tree = self.b.repository.revision_tree('rev-1')
        new_tree = self.b.repository.revision_tree('rev-4')
        out = self.get_diff(old_tree, new_tree, specific_files=['file1b'],
                            working_tree=self.wt)
        self.assertContainsRe(out, 'file1\t')

    def test_recursive_diff(self):
        """Children of directories are matched"""
        os.mkdir('dir1')
        os.mkdir('dir2')
        self.wt.add(['dir1', 'dir2'])
        self.wt.rename_one('file1', 'dir1/file1')
        old_tree = self.b.repository.revision_tree('rev-1')
        new_tree = self.b.repository.revision_tree('rev-4')
        out = self.get_diff(old_tree, new_tree, specific_files=['dir1'],
                            working_tree=self.wt)
        self.assertContainsRe(out, 'file1\t')
        out = self.get_diff(old_tree, new_tree, specific_files=['dir2'],
                            working_tree=self.wt)
        self.assertNotContainsRe(out, 'file1\t')


class TestPatienceDiffLib(TestCase):

    def test_unique_lcs(self):
        unique_lcs = bzrlib.patiencediff.unique_lcs
        self.assertEquals(unique_lcs('', ''), [])
        self.assertEquals(unique_lcs('a', 'a'), [(0,0)])
        self.assertEquals(unique_lcs('a', 'b'), [])
        self.assertEquals(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
        self.assertEquals(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
        self.assertEquals(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
        self.assertEquals(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
                                                         (3,3), (4,4)])
        self.assertEquals(unique_lcs('acbac', 'abc'), [(2,1)])
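
        # Editor's walkthrough (not in the original file): unique_lcs pairs
        # up indices of elements that occur exactly once in each sequence.
        # For 'abcde' vs 'cdeab' the longest such chain is 'cde', giving
        # [(2, 0), (3, 1), (4, 2)]; for 'acbac' vs 'abc' only 'b' is unique
        # on both sides, hence [(2, 1)].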

    def test_recurse_matches(self):
        def test_one(a, b, matches):
            test_matches = []
            bzrlib.patiencediff.recurse_matches(a, b, 0, 0, len(a), len(b),
                test_matches, 10)
            self.assertEquals(test_matches, matches)

        test_one(['a', '', 'b', '', 'c'], ['a', 'a', 'b', 'c', 'c'],
                 [(0, 0), (2, 2), (4, 4)])
        test_one(['a', 'c', 'b', 'a', 'c'], ['a', 'b', 'c'],
                 [(0, 0), (2, 1), (4, 2)])

        # recurse_matches doesn't match non-unique
        # lines surrounded by bogus text.
        # The update has been done in patiencediff.SequenceMatcher instead

        # This is what it could be
        #test_one('aBccDe', 'abccde', [(0,0), (2,2), (3,3), (5,5)])

        # This is what it currently gives:
        test_one('aBccDe', 'abccde', [(0,0), (5,5)])
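
        # Editor's note (not in the original file): in 'aBccDe' vs 'abccde'
        # only 'a' and 'e' occur exactly once on both sides, so
        # recurse_matches can anchor on just those two and never pairs up the
        # repeated 'c's; the commented-out expectation above is what a
        # smarter matcher could return, and test_matching_blocks below
        # records the same limitation ("non unique lines surrounded by
        # non-matching lines won't be found").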

    def test_matching_blocks(self):
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            self.assertEquals((len(a), len(b), 0), blocks[-1])
            self.assertEquals(expected_blocks, blocks[:-1])

        # Some basic matching tests
        chk_blocks('', '', [])
        chk_blocks([], [], [])
        chk_blocks('abcd', 'abcd', [(0, 0, 4)])
        chk_blocks('abcd', 'abce', [(0, 0, 3)])
        chk_blocks('eabc', 'abce', [(1, 0, 3)])
        chk_blocks('eabce', 'abce', [(1, 0, 4)])
        chk_blocks('abcde', 'abXde', [(0, 0, 2), (3, 3, 2)])
        chk_blocks('abcde', 'abXYZde', [(0, 0, 2), (3, 5, 2)])
        chk_blocks('abde', 'abXYZde', [(0, 0, 2), (2, 5, 2)])
        # This may check too much, but it checks to see that
        # a copied block stays attached to the previous section,
        # not the later one.
        # difflib would tend to grab the trailing longest match
        # which would make the diff not look right
        chk_blocks('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                   [(0, 0, 6), (6, 11, 10)])

        # make sure it supports passing in lists
        chk_blocks(
                   ['hello there\n',
                    'world\n',
                    'how are you today?\n'],
                   ['hello there\n',
                    'how are you today?\n'],
                   [(0, 0, 1), (2, 1, 1)])

        # non unique lines surrounded by non-matching lines
        # won't be found
        chk_blocks('aBccDe', 'abccde', [(0,0,1), (5,5,1)])

        # But they only need to be locally unique
        chk_blocks('aBcDec', 'abcdec', [(0,0,1), (2,2,1), (4,4,2)])

        # non unique blocks won't be matched
        chk_blocks('aBcdEcdFg', 'abcdecdfg', [(0,0,1), (8,8,1)])

        # but locally unique ones will
        chk_blocks('aBcdEeXcdFg', 'abcdecdfg', [(0,0,1), (2,2,2),
                                                (5,4,1), (7,5,2), (10,8,1)])

        chk_blocks('abbabbXd', 'cabbabxd', [(7,7,1)])
        chk_blocks('abbabbbb', 'cabbabbc', [])
        chk_blocks('bbbbbbbb', 'cbbbbbbc', [])
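
        # Editor's note (not in the original file): each expected block is a
        # difflib-style triple (i, j, n) meaning a[i:i+n] == b[j:j+n], and
        # get_matching_blocks() always ends with a (len(a), len(b), 0)
        # sentinel, which chk_blocks checks and strips before comparing.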

    def test_opcodes(self):
        def chk_ops(a, b, expected_codes):
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            self.assertEquals(expected_codes, s.get_opcodes())

        chk_ops('', '', [])
        chk_ops([], [], [])
        chk_ops('abcd', 'abcd', [('equal',    0,4, 0,4)])
        chk_ops('abcd', 'abce', [('equal',   0,3, 0,3),
                                 ('replace', 3,4, 3,4)
                                ])
        chk_ops('eabc', 'abce', [('delete', 0,1, 0,0),
                                 ('equal',  1,4, 0,3),
                                 ('insert', 4,4, 3,4)
                                ])
        chk_ops('eabce', 'abce', [('delete', 0,1, 0,0),
                                  ('equal',  1,5, 0,4)
                                 ])
        chk_ops('abcde', 'abXde', [('equal',   0,2, 0,2),
                                   ('replace', 2,3, 2,3),
                                   ('equal',   3,5, 3,5)
                                  ])
        chk_ops('abcde', 'abXYZde', [('equal',   0,2, 0,2),
                                     ('replace', 2,3, 2,5),
                                     ('equal',   3,5, 5,7)
                                    ])
        chk_ops('abde', 'abXYZde', [('equal',  0,2, 0,2),
                                    ('insert', 2,2, 2,5),
                                    ('equal',  2,4, 5,7)
                                   ])
        chk_ops('abcdefghijklmnop', 'abcdefxydefghijklmnop',
                [('equal',  0,6,  0,6),
                 ('insert', 6,6,  6,11),
                 ('equal',  6,16, 11,21)
                ])
        chk_ops(
                [ 'hello there\n'
                , 'world\n'
                , 'how are you today?\n'],
                [ 'hello there\n'
                , 'how are you today?\n'],
                [('equal',  0,1, 0,1),
                 ('delete', 1,2, 1,1),
                 ('equal',  2,3, 1,2),
                ])
        chk_ops('aBccDe', 'abccde',
                [('equal',   0,1, 0,1),
                 ('replace', 1,5, 1,5),
                 ('equal',   5,6, 5,6),
                ])
        chk_ops('aBcDec', 'abcdec',
                [('equal',   0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal',   2,3, 2,3),
                 ('replace', 3,4, 3,4),
                 ('equal',   4,6, 4,6),
                ])
        chk_ops('aBcdEcdFg', 'abcdecdfg',
                [('equal',   0,1, 0,1),
                 ('replace', 1,8, 1,8),
                 ('equal',   8,9, 8,9)
                ])
        chk_ops('aBcdEeXcdFg', 'abcdecdfg',
                [('equal',   0,1, 0,1),
                 ('replace', 1,2, 1,2),
                 ('equal',   2,4, 2,4),
                 ('delete',  4,5, 4,4),
                 ('equal',   5,6, 4,5),
                 ('delete',  6,7, 5,5),
                 ('equal',   7,9, 5,7),
                 ('replace', 9,10, 7,8),
                 ('equal',   10,11, 8,9)
                ])
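
        # Editor's note (not in the original file): the opcodes follow the
        # difflib.SequenceMatcher convention -- each tuple is
        # (tag, i1, i2, j1, j2) with tag one of 'equal', 'replace', 'delete'
        # or 'insert', describing how a[i1:i2] maps onto b[j1:j2].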

    def test_multiple_ranges(self):
        # There was an earlier bug where we used a bad set of ranges,
        # this triggers that specific bug, to make sure it doesn't regress
        def chk_blocks(a, b, expected_blocks):
            # difflib always adds a signature of the total
            # length, with no matching entries at the end
            s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
            blocks = s.get_matching_blocks()
            x = blocks.pop()
            self.assertEquals(x, (len(a), len(b), 0))
            self.assertEquals(expected_blocks, blocks)

        chk_blocks('abcdefghijklmnop'
                   , 'abcXghiYZQRSTUVWXYZijklmnop'
                   , [(0, 0, 3), (6, 4, 3), (9, 20, 7)])

        chk_blocks('ABCd efghIjk  L'
                   , 'AxyzBCn mo pqrstuvwI1 2  L'
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])

        # These are rot13 code snippets.
        chk_blocks('''\
    trg nqqrq jura lbh nqq n svyr va gur qverpgbel.
    """
    gnxrf_netf = ['svyr*']
    gnxrf_bcgvbaf = ['ab-erphefr']
  
    qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr):
        sebz omeyvo.nqq vzcbeg fzneg_nqq, nqq_ercbegre_cevag, nqq_ercbegre_ahyy
        vs vf_dhvrg():
            ercbegre = nqq_ercbegre_ahyy
        ryfr:
            ercbegre = nqq_ercbegre_cevag
        fzneg_nqq(svyr_yvfg, abg ab_erphefr, ercbegre)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True), '''\
    trg nqqrq jura lbh nqq n svyr va gur qverpgbel.

    --qel-eha jvyy fubj juvpu svyrf jbhyq or nqqrq, ohg abg npghnyyl 
    nqq gurz.
    """
    gnxrf_netf = ['svyr*']
    gnxrf_bcgvbaf = ['ab-erphefr', 'qel-eha']

    qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr, qel_eha=Snyfr):
        vzcbeg omeyvo.nqq

        vs qel_eha:
            vs vf_dhvrg():
                # Guvf vf cbvagyrff, ohg V'q engure abg envfr na reebe
                npgvba = omeyvo.nqq.nqq_npgvba_ahyy
            ryfr:
                npgvba = omeyvo.nqq.nqq_npgvba_cevag
        ryvs vf_dhvrg():
            npgvba = omeyvo.nqq.nqq_npgvba_nqq
        ryfr:
            npgvba = omeyvo.nqq.nqq_npgvba_nqq_naq_cevag

        omeyvo.nqq.fzneg_nqq(svyr_yvfg, abg ab_erphefr, npgvba)


pynff pzq_zxqve(Pbzznaq):
'''.splitlines(True)
                   , [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])
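
        # Editor's note (not in the original file): the two long arguments
        # above are code snippets run through rot13 so that only their line
        # structure matters to the matcher; for example
        # 'trg nqqrq jura lbh nqq n svyr' is rot13 for
        # 'get added when you add a file'.  The expected blocks are the same
        # as for the short 'ABCd efghIjk  L' case because the snippets share
        # that pattern of uniquely matching lines.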

    def test_patience_unified_diff(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        unified_diff = bzrlib.patiencediff.unified_diff
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals([ '--- \n',
                            '+++ \n',
                            '@@ -1,3 +1,2 @@\n',
                            ' hello there\n',
                            '-world\n',
                            ' how are you today?\n'
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))
        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           '+x\n',
                           '+y\n',
                           ' d\n',
                           ' e\n',
                           ' f\n']
                          , list(unified_diff(txt_a, txt_b)))
        # And the patience diff
        self.assertEquals(['--- \n',
                           '+++ \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff(txt_a, txt_b,
                                 sequencematcher=psm)))
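
        # Editor's note (not in the original file): the two expectations
        # above show the point of patience diff on 'abc...p' vs
        # 'abcdefxydef...p': plain longest-common-substring matching latches
        # onto the later 'defghijklmnop' run, so its hunk inserts
        # 'd e f x y' before the original 'd e f', while the patience
        # matcher keeps the first 'd e f' attached to the preceding lines
        # and reports a single 'x y d e f' insertion at '@@ -4,6 +4,11 @@'.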


class TestPatienceDiffLibFiles(TestCaseInTempDir):

    def test_patience_unified_diff_files(self):
        txt_a = ['hello there\n',
                 'world\n',
                 'how are you today?\n']
        txt_b = ['hello there\n',
                 'how are you today?\n']
        open('a1', 'wb').writelines(txt_a)
        open('b1', 'wb').writelines(txt_b)

        unified_diff_files = bzrlib.patiencediff.unified_diff_files
        psm = bzrlib.patiencediff.PatienceSequenceMatcher
        self.assertEquals(['--- a1 \n',
                           '+++ b1 \n',
                           '@@ -1,3 +1,2 @@\n',
                           ' hello there\n',
                           '-world\n',
                           ' how are you today?\n',
                          ]
                          , list(unified_diff_files('a1', 'b1',
                                 sequencematcher=psm)))

        txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
        txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
        open('a2', 'wb').writelines(txt_a)
        open('b2', 'wb').writelines(txt_b)

        # This is the result with LongestCommonSubstring matching
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -1,6 +1,11 @@\n',
                           ' a\n',
                           ' b\n',
                           ' c\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           '+x\n',
                           '+y\n',
                           ' d\n',
                           ' e\n',
                           ' f\n']
                          , list(unified_diff_files('a2', 'b2')))

        # And the patience diff
        self.assertEquals(['--- a2 \n',
                           '+++ b2 \n',
                           '@@ -4,6 +4,11 @@\n',
                           ' d\n',
                           ' e\n',
                           ' f\n',
                           '+x\n',
                           '+y\n',
                           '+d\n',
                           '+e\n',
                           '+f\n',
                           ' g\n',
                           ' h\n',
                           ' i\n',
                          ]
                          , list(unified_diff_files('a2', 'b2',
                                 sequencematcher=psm)))
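

# Minimal standalone usage sketch (editor's addition; it only uses the
# bzrlib.patiencediff APIs exercised above, and the file names are made up):
#
#   import bzrlib.patiencediff
#   old_lines = open('a', 'rb').readlines()
#   new_lines = open('b', 'rb').readlines()
#   for line in bzrlib.patiencediff.unified_diff(
#           old_lines, new_lines,
#           sequencematcher=bzrlib.patiencediff.PatienceSequenceMatcher):
#       print line,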