/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
0.17.1 by Robert Collins
Starting point. Interface tests hooked up and failing.
# groupcompress, a bzr plugin providing new compression logic.
# Copyright (C) 2008 Canonical Limited.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
#
"""Tests for group compression."""
19
20
import zlib
21
22
from bzrlib import tests
0.23.58 by John Arbash Meinel
fix up the failing tests.
23
from bzrlib.osutils import sha_string
0.17.31 by John Arbash Meinel
Bring in the 'rabin' experiment.
24
from bzrlib.plugins.groupcompress import errors, groupcompress
0.17.1 by Robert Collins
Starting point. Interface tests hooked up and failing.
25
from bzrlib.tests import (
26
    TestCaseWithTransport,
27
    TestScenarioApplier,
28
    adapt_tests,
29
    )
30
from bzrlib.transport import get_transport
31
32
33
def load_tests(standard_tests, module, loader):
    """Multiply the VersionedFiles interface tests against groupcompress.

    This hooks the generic ``TestVersionedFiles`` interface tests up to a
    groupcompress pack factory scenario, so the standard interface suite
    also runs against this plugin's implementation.

    :param standard_tests: The tests collected for this module so far.
    :param module: The module under test (unused here).
    :param loader: The ``TestLoader`` used to load additional test cases.
    :return: ``standard_tests`` with the adapted interface tests added.
    """
    # Imported lazily so merely importing this module does not pull in the
    # whole versionedfile test machinery.
    from bzrlib.tests.test_versionedfile import TestVersionedFiles
    vf_interface_tests = loader.loadTestsFromTestCase(TestVersionedFiles)
    cleanup_pack_group = groupcompress.cleanup_pack_group
    make_pack_factory = groupcompress.make_pack_factory
    # One scenario: no graph support, key length 1, no partial insertion.
    group_scenario = ('groupcompressrabin-nograph', {
            'cleanup':cleanup_pack_group,
            'factory':make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            'support_partial_insertion':False,
            }
        )
    applier = TestScenarioApplier()
    applier.scenarios = [group_scenario]
    adapt_tests(vf_interface_tests, applier, standard_tests)
    return standard_tests


class TestGroupCompressor(tests.TestCase):
    """Tests for GroupCompressor"""

    def test_empty_delta(self):
        # A fresh compressor has produced no output lines yet.
        compressor = groupcompress.GroupCompressor(True)
        self.assertEqual([], compressor.lines)

    def test_one_nosha_delta(self):
        # diff against NUKK
        compressor = groupcompress.GroupCompressor(True)
        sha1, end_point = compressor.compress(('label',),
            'strange\ncommon\n', None)
        self.assertEqual(sha_string('strange\ncommon\n'), sha1)
        # With nothing to delta against, the first text is stored as a
        # fulltext record.
        expected_lines = [
            'fulltext\n',
            'label:label\nsha1:%s\n' % sha1,
            'len:15\n',
            'strange\ncommon\n',
            ]
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def _chunks_to_repr_lines(self, chunks):
        # Join the chunks and re-split on newlines, repr-ing each line so
        # non-ascii bytes become visible in assertion failure output.
        return '\n'.join(map(repr, ''.join(chunks).split('\n')))

    def assertEqualDiffEncoded(self, expected, actual):
        """Compare the actual content to the expected content.

        :param expected: A group of chunks that we expect to see
        :param actual: The measured 'chunks'

        We will transform the chunks back into lines, and then run 'repr()'
        over them to handle non-ascii characters.
        """
        self.assertEqualDiff(self._chunks_to_repr_lines(expected),
                             self._chunks_to_repr_lines(actual))

    def test_two_nosha_delta(self):
        # The second text shares a long run with the first, so it should be
        # stored as a delta copying that run from the stream.
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, _ = compressor.compress(('label',),
            'strange\ncommon long line\nthat needs a 16 byte match\n', None)
        expected_lines = list(compressor.lines)
        sha1_2, end_point = compressor.compress(('newlabel',),
            'common long line\nthat needs a 16 byte match\ndifferent\n', None)
        self.assertEqual(sha_string('common long line\n'
                                    'that needs a 16 byte match\n'
                                    'different\n'), sha1_2)
        expected_lines.extend([
            'delta\n'
            'label:newlabel\n',
            'sha1:%s\n' % sha1_2,
            'len:16\n',
            # source and target length
            '\x7e\x36',
            # copy the line common
            '\x91\x52\x2c', #copy, offset 0x52, len 0x2c
            # add the line different, and the trailing newline
            '\x0adifferent\n', # insert 10 bytes
            ])
        self.assertEqualDiffEncoded(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_three_nosha_delta(self):
        # The first interesting test: make a change that should use lines from
        # both parents.
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, end_point = compressor.compress(('label',),
            'strange\ncommon very very long line\nwith some extra text\n', None)
        sha1_2, _ = compressor.compress(('newlabel',),
            'different\nmoredifferent\nand then some more\n', None)
        expected_lines = list(compressor.lines)
        sha1_3, end_point = compressor.compress(('label3',),
            'new\ncommon very very long line\nwith some extra text\n'
            'different\nmoredifferent\nand then some more\n',
            None)
        self.assertEqual(
            sha_string('new\ncommon very very long line\nwith some extra text\n'
                       'different\nmoredifferent\nand then some more\n'),
            sha1_3)
        expected_lines.extend([
            'delta\n',
            'label:label3\n',
            'sha1:%s\n' % sha1_3,
            'len:13\n',
            '\xfa\x01\x5f' # source and target length
            # insert new
            '\x03new',
            # Copy of first parent 'common' range
            '\x91\x51\x31' # copy, offset 0x51, 0x31 bytes
            # Copy of second parent 'different' range
            '\x91\xcf\x2b' # copy, offset 0xcf, 0x2b bytes
            ])
        self.assertEqualDiffEncoded(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_stats(self):
        # After three overlapping texts the compression ratio should be
        # roughly 0.3 (checked to one decimal place).
        compressor = groupcompress.GroupCompressor(True)
        compressor.compress(('label',), 'strange\ncommon\n', None)
        compressor.compress(('newlabel',),
                            'common\ndifferent\nmoredifferent\n', None)
        compressor.compress(('label3',),
                            'new\ncommon\ndifferent\nmoredifferent\n', None)
        self.assertAlmostEqual(0.3, compressor.ratio(), 1)

    def test_extract_from_compressor(self):
        # Knit fetching will try to reconstruct texts locally which results in
        # reading something that is in the compressor stream already.
        compressor = groupcompress.GroupCompressor(True)
        sha_1,  _ = compressor.compress(('label',), 'strange\ncommon\n', None)
        sha_2, _ = compressor.compress(('newlabel',),
            'common\ndifferent\nmoredifferent\n', None)
        # get the first out
        self.assertEqual((['strange\ncommon\n'], sha_1),
            compressor.extract(('label',)))
        # and the second
        self.assertEqual((['common\ndifferent\nmoredifferent\n'],
            sha_2), compressor.extract(('newlabel',)))


class TestGroupCompressBlock(tests.TestCase):
    """Tests for the GroupCompressBlock serialization format."""

    def test_from_empty_bytes(self):
        # An empty string is not a valid serialized block.
        self.assertRaises(errors.InvalidGroupCompressBlock,
                          groupcompress.GroupCompressBlock.from_bytes, '')

    def test_from_bytes(self):
        # 'gcb1p\n' is the v1 plain header; parsing it yields a block object.
        block = groupcompress.GroupCompressBlock.from_bytes('gcb1p\n')
        self.assertIsInstance(block, groupcompress.GroupCompressBlock)

    def test_add_entry(self):
        gcb = groupcompress.GroupCompressBlock()
        e = gcb.add_entry(('foo', 'bar'), 'fulltext', 'abcd'*10, 0, 100)
        self.assertIsInstance(e, groupcompress.GroupCompressBlockEntry)
        # The entry records exactly what was passed in.
        self.assertEqual(('foo', 'bar'), e.key)
        self.assertEqual('fulltext', e.type)
        self.assertEqual('abcd'*10, e.sha1)
        self.assertEqual(0, e.start)
        self.assertEqual(100, e.length)

    def test_to_bytes(self):
        gcb = groupcompress.GroupCompressBlock()
        gcb.add_entry(('foo', 'bar'), 'fulltext', 'abcd'*10, 0, 100)
        gcb.add_entry(('bing',), 'fulltext', 'abcd'*10, 100, 100)
        # Entries serialize sorted by key; multi-part keys are joined with
        # NUL ('foo\x00bar').
        self.assertEqualDiff('gcb1p\n' # group compress block v1 plain
                             '183\n' # Length of all meta-info
                             'key:bing\n'
                             'type:fulltext\n'
                             'sha1:abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd\n'
                             'start:100\n'
                             'length:100\n'
                             '\n'
                             'key:foo\x00bar\n'
                             'type:fulltext\n'
                             'sha1:abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd\n'
                             'start:0\n'
                             'length:100\n'
                             '\n', gcb.to_bytes())