/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
# groupcompress, a bzr plugin providing new compression logic.
# Copyright (C) 2008 Canonical Limited.
# 
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
# 
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
# 

"""Tests for group compression."""

import zlib

from bzrlib import tests
from bzrlib.osutils import sha_strings
from bzrlib.plugins.groupcompress_rabin import errors, groupcompress
from bzrlib.tests import (
    TestCaseWithTransport,
    TestScenarioApplier,
    adapt_tests,
    )
from bzrlib.transport import get_transport


def load_tests(standard_tests, module, loader):
    from bzrlib.tests.test_versionedfile import TestVersionedFiles
    vf_interface_tests = loader.loadTestsFromTestCase(TestVersionedFiles)
    cleanup_pack_group = groupcompress.cleanup_pack_group
    make_pack_factory = groupcompress.make_pack_factory
    group_scenario = ('groupcompressrabin-nograph', {
            'cleanup':cleanup_pack_group,
            'factory':make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            'support_partial_insertion':False,
            }
        )
    applier = TestScenarioApplier()
    applier.scenarios = [group_scenario]
    adapt_tests(vf_interface_tests, applier, standard_tests)
    return standard_tests


class TestGroupCompressor(TestCaseWithTransport):
    """Tests for GroupCompressor"""

    def test_empty_delta(self):
        compressor = groupcompress.GroupCompressor(True)
        self.assertEqual([], compressor.lines)

    def test_one_nosha_delta(self):
        # diff against NULL
        compressor = groupcompress.GroupCompressor(True)
        sha1, end_point = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        self.assertEqual(sha_strings(['strange\n', 'common\n']), sha1)
        expected_lines = [
            'fulltext\n',
            'label:label\nsha1:%s\n' % sha1,
            'len:15\n',
            'strange\ncommon\n',
            ]
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_two_nosha_delta(self):
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, _ = compressor.compress(('label',),
            ['strange\n', 'common very very very long line\n'], None)
        expected_lines = list(compressor.lines)
        sha1_2, end_point = compressor.compress(('newlabel',),
            ['common very very very long line\n', 'different\n'], None)
        self.assertEqual(sha_strings(['common very very very long line\n',
                                      'different\n']), sha1_2)
        expected_lines.extend([
            'delta\n',
            'label:newlabel\n',
            'sha1:%s\n' % sha1_2,
            'len:16\n',
            # copy the line common
            'c,72,17\n',
            # add the line different, and the trailing newline
            'i,2\n',
            'different\n',
            ])
        self.assertEqualDiff(''.join(expected_lines), ''.join(compressor.lines))
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_three_nosha_delta(self):
        # The first interesting test: make a change that should use lines from
        # both parents.
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, end_point = compressor.compress(('label',),
            ['strange\n', 'common long line\n'], None)
        sha1_2, _ = compressor.compress(('newlabel',),
            ['common long line\n', 'different\n', 'moredifferent\n'], None)
        expected_lines = list(compressor.lines)
        sha1_3, end_point = compressor.compress(('label3',),
            ['new\n', 'common long line\n', 'different\n', 'moredifferent\n'],
            None)
        self.assertEqual(
            sha_strings(['new\n', 'common long line\n', 'different\n',
                         'moredifferent\n']),
            sha1_3)
        expected_lines.extend([
            'delta\n',
            'label:label3\n',
            'sha1:%s\n' % sha1_3,
            'len:11\n',
            # insert new
            'i,1\n',
            'new\n',
            # copy the line common
            'c,72,17\n',
            # copy the lines different, moredifferent and trailing newline
            'c,165,25\n',
            ])
        self.assertEqualDiff(''.join(expected_lines),
                             ''.join(compressor.lines))
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_stats(self):
        compressor = groupcompress.GroupCompressor(True)
        compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        compressor.compress(('label3',),
            ['new\n', 'common\n', 'different\n', 'moredifferent\n'], None)
        self.assertAlmostEqual(0.3, compressor.ratio(), 1)

    def test_extract_from_compressor(self):
        # Knit fetching will try to reconstruct texts locally which results in
        # reading something that is in the compressor stream already.
        compressor = groupcompress.GroupCompressor(True)
        sha_1, _ = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        sha_2, _ = compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        # get the first out
        self.assertEqual((['strange\ncommon\n'], sha_1),
            compressor.extract(('label',)))
        # and the second
        self.assertEqual((['common\ndifferent\nmoredifferent\n'],
            sha_2), compressor.extract(('newlabel',)))
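
The tests above exercise a small surface of the GroupCompressor API: compress() takes a key tuple, a list of lines and an optional expected sha1, and returns the text's sha1 together with the end offset of the output accumulated in compressor.lines; extract() reads a previously compressed text back out of the group; ratio() reports the compression achieved. Below is a minimal usage sketch distilled from those assertions. It is illustrative only: the key names are made up, the True constructor argument is simply what every test in this module passes, and no calls beyond the ones the tests themselves make are assumed.

# Sketch: compress two related texts into a group and read one back.
from bzrlib.osutils import sha_strings
from bzrlib.plugins.groupcompress_rabin import groupcompress

compressor = groupcompress.GroupCompressor(True)

# compress() returns the sha1 of the text and the end offset of the
# accumulated output in compressor.lines.
base_sha, end_point = compressor.compress(('base-key',),
    ['strange\n', 'common\n'], None)
assert base_sha == sha_strings(['strange\n', 'common\n'])

# A second text sharing content with the first is added to the same group.
next_sha, end_point = compressor.compress(('next-key',),
    ['common\n', 'different\n'], None)

# Texts already written to the group can be read back without going to disk;
# extract() returns the stored chunks and the sha1 recorded for the key.
chunks, extracted_sha = compressor.extract(('next-key',))
assert extracted_sha == next_sha

# ratio() reports how well the group compressed, as checked in test_stats.
compression_ratio = compressor.ratio()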