1
# Copyright (C) 2005, 2006 Canonical Development Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
18
from cStringIO import StringIO
20
from tempfile import TemporaryFile
22
from bzrlib.diff import internal_diff, external_diff, show_diff_trees
23
from bzrlib.errors import BinaryFile, NoDiff
24
import bzrlib.patiencediff
25
from bzrlib.tests import (TestCase, TestCaseWithTransport,
26
TestCaseInTempDir, TestSkipped)
29
def udiff_lines(old, new, allow_binary=False):
    """Run internal_diff on two lists of lines and return the diff's lines."""
    output = StringIO()
    internal_diff('old', old, 'new', new, output, allow_binary)
    # rewind so the caller reads from the start of the captured diff
    output.seek(0, 0)
    return output.readlines()
36
def external_udiff_lines(old, new, use_stringio=False):
    """Run the external ``diff`` command on two lists of lines.

    :param use_stringio: collect output in a StringIO instead of a real
        temporary file.  StringIO has no fileno, so it tests a different
        codepath in external_diff.
    :raises TestSkipped: if no external ``diff`` program is available.
    """
    if use_stringio:
        # StringIO has no fileno, so it tests a different codepath
        output = StringIO()
    else:
        output = TemporaryFile()
    try:
        external_diff('old', old, 'new', new, output, diff_opts=['-u'])
    except NoDiff:
        raise TestSkipped('external "diff" not present to test')
    output.seek(0, 0)
    lines = output.readlines()
    output.close()
    return lines
52
class TestDiff(TestCase):
54
def test_add_nl(self):
55
"""diff generates a valid diff for patches that add a newline"""
56
lines = udiff_lines(['boo'], ['boo\n'])
57
self.check_patch(lines)
58
self.assertEquals(lines[4], '\\ No newline at end of file\n')
59
## "expected no-nl, got %r" % lines[4]
61
def test_add_nl_2(self):
62
"""diff generates a valid diff for patches that change last line and
65
lines = udiff_lines(['boo'], ['goo\n'])
66
self.check_patch(lines)
67
self.assertEquals(lines[4], '\\ No newline at end of file\n')
68
## "expected no-nl, got %r" % lines[4]
70
def test_remove_nl(self):
71
"""diff generates a valid diff for patches that change last line and
74
lines = udiff_lines(['boo\n'], ['boo'])
75
self.check_patch(lines)
76
self.assertEquals(lines[5], '\\ No newline at end of file\n')
77
## "expected no-nl, got %r" % lines[5]
79
def check_patch(self, lines):
80
self.assert_(len(lines) > 1)
81
## "Not enough lines for a file header for patch:\n%s" % "".join(lines)
82
self.assert_(lines[0].startswith ('---'))
83
## 'No orig line for patch:\n%s' % "".join(lines)
84
self.assert_(lines[1].startswith ('+++'))
85
## 'No mod line for patch:\n%s' % "".join(lines)
86
self.assert_(len(lines) > 2)
87
## "No hunks for patch:\n%s" % "".join(lines)
88
self.assert_(lines[2].startswith('@@'))
89
## "No hunk header for patch:\n%s" % "".join(lines)
90
self.assert_('@@' in lines[2][2:])
91
## "Unterminated hunk header for patch:\n%s" % "".join(lines)
93
def test_binary_lines(self):
94
self.assertRaises(BinaryFile, udiff_lines, [1023 * 'a' + '\x00'], [])
95
self.assertRaises(BinaryFile, udiff_lines, [], [1023 * 'a' + '\x00'])
96
udiff_lines([1023 * 'a' + '\x00'], [], allow_binary=True)
97
udiff_lines([], [1023 * 'a' + '\x00'], allow_binary=True)
99
def test_external_diff(self):
100
lines = external_udiff_lines(['boo\n'], ['goo\n'])
101
self.check_patch(lines)
103
def test_external_diff_no_fileno(self):
104
# Make sure that we can handle not having a fileno, even
105
# if the diff is large
106
lines = external_udiff_lines(['boo\n']*10000,
109
self.check_patch(lines)
111
def test_internal_diff_default(self):
112
# Default internal diff encoding is utf8
114
internal_diff(u'old_\xb5', ['old_text\n'],
115
u'new_\xe5', ['new_text\n'], output)
116
lines = output.getvalue().splitlines(True)
117
self.check_patch(lines)
118
self.assertEquals(['--- old_\xc2\xb5\n',
119
'+++ new_\xc3\xa5\n',
127
def test_internal_diff_utf8(self):
129
internal_diff(u'old_\xb5', ['old_text\n'],
130
u'new_\xe5', ['new_text\n'], output,
131
path_encoding='utf8')
132
lines = output.getvalue().splitlines(True)
133
self.check_patch(lines)
134
self.assertEquals(['--- old_\xc2\xb5\n',
135
'+++ new_\xc3\xa5\n',
143
def test_internal_diff_iso_8859_1(self):
145
internal_diff(u'old_\xb5', ['old_text\n'],
146
u'new_\xe5', ['new_text\n'], output,
147
path_encoding='iso-8859-1')
148
lines = output.getvalue().splitlines(True)
149
self.check_patch(lines)
150
self.assertEquals(['--- old_\xb5\n',
159
def test_internal_diff_returns_bytes(self):
161
output = StringIO.StringIO()
162
internal_diff(u'old_\xb5', ['old_text\n'],
163
u'new_\xe5', ['new_text\n'], output)
164
self.failUnless(isinstance(output.getvalue(), str),
165
'internal_diff should return bytestrings')
168
class TestDiffDates(TestCaseWithTransport):
171
super(TestDiffDates, self).setUp()
172
self.wt = self.make_branch_and_tree('.')
173
self.b = self.wt.branch
174
self.build_tree_contents([
175
('file1', 'file1 contents at rev 1\n'),
176
('file2', 'file2 contents at rev 1\n')
178
self.wt.add(['file1', 'file2'])
180
message='Revision 1',
181
timestamp=1143849600, # 2006-04-01 00:00:00 UTC
184
self.build_tree_contents([('file1', 'file1 contents at rev 2\n')])
186
message='Revision 2',
187
timestamp=1143936000, # 2006-04-02 00:00:00 UTC
190
self.build_tree_contents([('file2', 'file2 contents at rev 3\n')])
192
message='Revision 3',
193
timestamp=1144022400, # 2006-04-03 00:00:00 UTC
196
self.wt.remove(['file2'])
198
message='Revision 4',
199
timestamp=1144108800, # 2006-04-04 00:00:00 UTC
202
self.build_tree_contents([
203
('file1', 'file1 contents in working tree\n')
205
# set the date stamps for files in the working tree to known values
206
os.utime('file1', (1144195200, 1144195200)) # 2006-04-05 00:00:00 UTC
208
def get_diff(self, tree1, tree2):
210
show_diff_trees(tree1, tree2, output,
211
old_label='old/', new_label='new/')
212
return output.getvalue()
214
def test_diff_rev_tree_working_tree(self):
215
output = self.get_diff(self.wt.basis_tree(), self.wt)
216
# note that the date for old/file1 is from rev 2 rather than from
217
# the basis revision (rev 4)
218
self.assertEqualDiff(output, '''\
219
=== modified file 'file1'
220
--- old/file1\t2006-04-02 00:00:00 +0000
221
+++ new/file1\t2006-04-05 00:00:00 +0000
223
-file1 contents at rev 2
224
+file1 contents in working tree
228
def test_diff_rev_tree_rev_tree(self):
229
tree1 = self.b.repository.revision_tree('rev-2')
230
tree2 = self.b.repository.revision_tree('rev-3')
231
output = self.get_diff(tree1, tree2)
232
self.assertEqualDiff(output, '''\
233
=== modified file 'file2'
234
--- old/file2\t2006-04-01 00:00:00 +0000
235
+++ new/file2\t2006-04-03 00:00:00 +0000
237
-file2 contents at rev 1
238
+file2 contents at rev 3
242
def test_diff_add_files(self):
243
tree1 = self.b.repository.revision_tree(None)
244
tree2 = self.b.repository.revision_tree('rev-1')
245
output = self.get_diff(tree1, tree2)
246
# the files have the epoch time stamp for the tree in which
248
self.assertEqualDiff(output, '''\
249
=== added file 'file1'
250
--- old/file1\t1970-01-01 00:00:00 +0000
251
+++ new/file1\t2006-04-01 00:00:00 +0000
253
+file1 contents at rev 1
255
=== added file 'file2'
256
--- old/file2\t1970-01-01 00:00:00 +0000
257
+++ new/file2\t2006-04-01 00:00:00 +0000
259
+file2 contents at rev 1
263
def test_diff_remove_files(self):
264
tree1 = self.b.repository.revision_tree('rev-3')
265
tree2 = self.b.repository.revision_tree('rev-4')
266
output = self.get_diff(tree1, tree2)
267
# the file has the epoch time stamp for the tree in which
269
self.assertEqualDiff(output, '''\
270
=== removed file 'file2'
271
--- old/file2\t2006-04-03 00:00:00 +0000
272
+++ new/file2\t1970-01-01 00:00:00 +0000
274
-file2 contents at rev 3
278
def test_show_diff_specified(self):
279
"""A working-tree id can be used to identify a file"""
280
self.wt.rename_one('file1', 'file1b')
281
old_tree = self.b.repository.revision_tree('rev-1')
282
new_tree = self.b.repository.revision_tree('rev-4')
283
out_file = StringIO()
284
show_diff_trees(old_tree, new_tree, to_file=out_file,
285
specific_files=['file1b'], extra_trees=[self.wt])
286
self.assertContainsRe(out_file.getvalue(), 'file1\t')
289
class TestPatienceDiffLib(TestCase):
291
def test_unique_lcs(self):
292
unique_lcs = bzrlib.patiencediff.unique_lcs
293
self.assertEquals(unique_lcs('', ''), [])
294
self.assertEquals(unique_lcs('a', 'a'), [(0,0)])
295
self.assertEquals(unique_lcs('a', 'b'), [])
296
self.assertEquals(unique_lcs('ab', 'ab'), [(0,0), (1,1)])
297
self.assertEquals(unique_lcs('abcde', 'cdeab'), [(2,0), (3,1), (4,2)])
298
self.assertEquals(unique_lcs('cdeab', 'abcde'), [(0,2), (1,3), (2,4)])
299
self.assertEquals(unique_lcs('abXde', 'abYde'), [(0,0), (1,1),
301
self.assertEquals(unique_lcs('acbac', 'abc'), [(2,1)])
303
def test_recurse_matches(self):
304
def test_one(a, b, matches):
306
bzrlib.patiencediff.recurse_matches(a, b, 0, 0, len(a), len(b),
308
self.assertEquals(test_matches, matches)
310
test_one(['a', '', 'b', '', 'c'], ['a', 'a', 'b', 'c', 'c'],
311
[(0, 0), (2, 2), (4, 4)])
312
test_one(['a', 'c', 'b', 'a', 'c'], ['a', 'b', 'c'],
313
[(0, 0), (2, 1), (4, 2)])
315
# recurse_matches doesn't match non-unique
316
# lines surrounded by bogus text.
317
# The update has been done in patiencediff.SequenceMatcher instead
319
# This is what it could be
320
#test_one('aBccDe', 'abccde', [(0,0), (2,2), (3,3), (5,5)])
322
# This is what it currently gives:
323
test_one('aBccDe', 'abccde', [(0,0), (5,5)])
325
def test_matching_blocks(self):
326
def chk_blocks(a, b, expected_blocks):
327
# difflib always adds a signature of the total
328
# length, with no matching entries at the end
329
s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
330
blocks = s.get_matching_blocks()
331
self.assertEquals((len(a), len(b), 0), blocks[-1])
332
self.assertEquals(expected_blocks, blocks[:-1])
334
# Some basic matching tests
335
chk_blocks('', '', [])
336
chk_blocks([], [], [])
337
chk_blocks('abcd', 'abcd', [(0, 0, 4)])
338
chk_blocks('abcd', 'abce', [(0, 0, 3)])
339
chk_blocks('eabc', 'abce', [(1, 0, 3)])
340
chk_blocks('eabce', 'abce', [(1, 0, 4)])
341
chk_blocks('abcde', 'abXde', [(0, 0, 2), (3, 3, 2)])
342
chk_blocks('abcde', 'abXYZde', [(0, 0, 2), (3, 5, 2)])
343
chk_blocks('abde', 'abXYZde', [(0, 0, 2), (2, 5, 2)])
344
# This may check too much, but it checks to see that
345
# a copied block stays attached to the previous section,
347
# difflib would tend to grab the trailing longest match
348
# which would make the diff not look right
349
chk_blocks('abcdefghijklmnop', 'abcdefxydefghijklmnop',
350
[(0, 0, 6), (6, 11, 10)])
352
# make sure it supports passing in lists
356
'how are you today?\n'],
358
'how are you today?\n'],
359
[(0, 0, 1), (2, 1, 1)])
361
# non unique lines surrounded by non-matching lines
363
chk_blocks('aBccDe', 'abccde', [(0,0,1), (5,5,1)])
365
# But they only need to be locally unique
366
chk_blocks('aBcDec', 'abcdec', [(0,0,1), (2,2,1), (4,4,2)])
368
# non unique blocks won't be matched
369
chk_blocks('aBcdEcdFg', 'abcdecdfg', [(0,0,1), (8,8,1)])
371
# but locally unique ones will
372
chk_blocks('aBcdEeXcdFg', 'abcdecdfg', [(0,0,1), (2,2,2),
373
(5,4,1), (7,5,2), (10,8,1)])
375
chk_blocks('abbabbXd', 'cabbabxd', [(7,7,1)])
376
chk_blocks('abbabbbb', 'cabbabbc', [])
377
chk_blocks('bbbbbbbb', 'cbbbbbbc', [])
379
def test_opcodes(self):
380
def chk_ops(a, b, expected_codes):
381
s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
382
self.assertEquals(expected_codes, s.get_opcodes())
386
chk_ops('abcd', 'abcd', [('equal', 0,4, 0,4)])
387
chk_ops('abcd', 'abce', [('equal', 0,3, 0,3),
388
('replace', 3,4, 3,4)
390
chk_ops('eabc', 'abce', [('delete', 0,1, 0,0),
394
chk_ops('eabce', 'abce', [('delete', 0,1, 0,0),
397
chk_ops('abcde', 'abXde', [('equal', 0,2, 0,2),
398
('replace', 2,3, 2,3),
401
chk_ops('abcde', 'abXYZde', [('equal', 0,2, 0,2),
402
('replace', 2,3, 2,5),
405
chk_ops('abde', 'abXYZde', [('equal', 0,2, 0,2),
406
('insert', 2,2, 2,5),
409
chk_ops('abcdefghijklmnop', 'abcdefxydefghijklmnop',
410
[('equal', 0,6, 0,6),
411
('insert', 6,6, 6,11),
412
('equal', 6,16, 11,21)
417
, 'how are you today?\n'],
419
, 'how are you today?\n'],
420
[('equal', 0,1, 0,1),
421
('delete', 1,2, 1,1),
424
chk_ops('aBccDe', 'abccde',
425
[('equal', 0,1, 0,1),
426
('replace', 1,5, 1,5),
429
chk_ops('aBcDec', 'abcdec',
430
[('equal', 0,1, 0,1),
431
('replace', 1,2, 1,2),
433
('replace', 3,4, 3,4),
436
chk_ops('aBcdEcdFg', 'abcdecdfg',
437
[('equal', 0,1, 0,1),
438
('replace', 1,8, 1,8),
441
chk_ops('aBcdEeXcdFg', 'abcdecdfg',
442
[('equal', 0,1, 0,1),
443
('replace', 1,2, 1,2),
445
('delete', 4,5, 4,4),
447
('delete', 6,7, 5,5),
449
('replace', 9,10, 7,8),
450
('equal', 10,11, 8,9)
453
def test_multiple_ranges(self):
454
# There was an earlier bug where we used a bad set of ranges,
455
# this triggers that specific bug, to make sure it doesn't regress
456
def chk_blocks(a, b, expected_blocks):
457
# difflib always adds a signature of the total
458
# length, with no matching entries at the end
459
s = bzrlib.patiencediff.PatienceSequenceMatcher(None, a, b)
460
blocks = s.get_matching_blocks()
462
self.assertEquals(x, (len(a), len(b), 0))
463
self.assertEquals(expected_blocks, blocks)
465
chk_blocks('abcdefghijklmnop'
466
, 'abcXghiYZQRSTUVWXYZijklmnop'
467
, [(0, 0, 3), (6, 4, 3), (9, 20, 7)])
469
chk_blocks('ABCd efghIjk L'
470
, 'AxyzBCn mo pqrstuvwI1 2 L'
471
, [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])
473
# These are rot13 code snippets.
475
trg nqqrq jura lbh nqq n svyr va gur qverpgbel.
477
gnxrf_netf = ['svyr*']
478
gnxrf_bcgvbaf = ['ab-erphefr']
480
qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr):
481
sebz omeyvo.nqq vzcbeg fzneg_nqq, nqq_ercbegre_cevag, nqq_ercbegre_ahyy
483
ercbegre = nqq_ercbegre_ahyy
485
ercbegre = nqq_ercbegre_cevag
486
fzneg_nqq(svyr_yvfg, abg ab_erphefr, ercbegre)
489
pynff pzq_zxqve(Pbzznaq):
490
'''.splitlines(True), '''\
491
trg nqqrq jura lbh nqq n svyr va gur qverpgbel.
493
--qel-eha jvyy fubj juvpu svyrf jbhyq or nqqrq, ohg abg npghnyyl
496
gnxrf_netf = ['svyr*']
497
gnxrf_bcgvbaf = ['ab-erphefr', 'qel-eha']
499
qrs eha(frys, svyr_yvfg, ab_erphefr=Snyfr, qel_eha=Snyfr):
504
# Guvf vf cbvagyrff, ohg V'q engure abg envfr na reebe
505
npgvba = omeyvo.nqq.nqq_npgvba_ahyy
507
npgvba = omeyvo.nqq.nqq_npgvba_cevag
509
npgvba = omeyvo.nqq.nqq_npgvba_nqq
511
npgvba = omeyvo.nqq.nqq_npgvba_nqq_naq_cevag
513
omeyvo.nqq.fzneg_nqq(svyr_yvfg, abg ab_erphefr, npgvba)
516
pynff pzq_zxqve(Pbzznaq):
518
, [(0,0,1), (1, 4, 2), (9, 19, 1), (12, 23, 3)])
520
def test_patience_unified_diff(self):
521
txt_a = ['hello there\n',
523
'how are you today?\n']
524
txt_b = ['hello there\n',
525
'how are you today?\n']
526
unified_diff = bzrlib.patiencediff.unified_diff
527
psm = bzrlib.patiencediff.PatienceSequenceMatcher
528
self.assertEquals([ '--- \n',
533
' how are you today?\n'
535
, list(unified_diff(txt_a, txt_b,
536
sequencematcher=psm)))
537
txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
538
txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
539
# This is the result with LongestCommonSubstring matching
540
self.assertEquals(['--- \n',
542
'@@ -1,6 +1,11 @@\n',
554
, list(unified_diff(txt_a, txt_b)))
555
# And the patience diff
556
self.assertEquals(['--- \n',
558
'@@ -4,6 +4,11 @@\n',
571
, list(unified_diff(txt_a, txt_b,
572
sequencematcher=psm)))
575
class TestPatienceDiffLibFiles(TestCaseInTempDir):
577
def test_patience_unified_diff_files(self):
578
txt_a = ['hello there\n',
580
'how are you today?\n']
581
txt_b = ['hello there\n',
582
'how are you today?\n']
583
open('a1', 'wb').writelines(txt_a)
584
open('b1', 'wb').writelines(txt_b)
586
unified_diff_files = bzrlib.patiencediff.unified_diff_files
587
psm = bzrlib.patiencediff.PatienceSequenceMatcher
588
self.assertEquals(['--- a1 \n',
593
' how are you today?\n',
595
, list(unified_diff_files('a1', 'b1',
596
sequencematcher=psm)))
598
txt_a = map(lambda x: x+'\n', 'abcdefghijklmnop')
599
txt_b = map(lambda x: x+'\n', 'abcdefxydefghijklmnop')
600
open('a2', 'wb').writelines(txt_a)
601
open('b2', 'wb').writelines(txt_b)
603
# This is the result with LongestCommonSubstring matching
604
self.assertEquals(['--- a2 \n',
606
'@@ -1,6 +1,11 @@\n',
618
, list(unified_diff_files('a2', 'b2')))
620
# And the patience diff
621
self.assertEquals(['--- a2 \n',
623
'@@ -4,6 +4,11 @@\n',
636
, list(unified_diff_files('a2', 'b2',
637
sequencematcher=psm)))