To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

# groupcompress, a bzr plugin providing new compression logic.
# Copyright (C) 2008 Canonical Limited.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
#

"""Tests for group compression."""

import zlib

from bzrlib import tests
from bzrlib.osutils import sha_strings
from bzrlib.plugins.groupcompress import errors, groupcompress
from bzrlib.tests import (
    TestCaseWithTransport,
    TestScenarioApplier,
    adapt_tests,
    )
from bzrlib.transport import get_transport


def load_tests(standard_tests, module, loader):
    from bzrlib.tests.test_versionedfile import TestVersionedFiles
    vf_interface_tests = loader.loadTestsFromTestCase(TestVersionedFiles)
    cleanup_pack_group = groupcompress.cleanup_pack_group
    make_pack_factory = groupcompress.make_pack_factory
    group_scenario = ('groupcompress-nograph', {
            'cleanup':cleanup_pack_group,
            'factory':make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            }
        )
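    # The scenario above runs the generic VersionedFiles interface tests
    # against a pack-backed groupcompress store built by make_pack_factory:
    # 'graph': False and 'key_length': 1 tell the interface tests to expect
    # a store with no graph support and single-element keys (exactly how
    # make_pack_factory(False, False, 1) maps to those settings is assumed
    # here rather than spelled out in this file).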
    applier = TestScenarioApplier()
    applier.scenarios = [group_scenario]
    adapt_tests(vf_interface_tests, applier, standard_tests)
    return standard_tests


class TestGroupCompressor(TestCaseWithTransport):
    """Tests for GroupCompressor"""

    def test_empty_delta(self):
        compressor = groupcompress.GroupCompressor(True)
        self.assertEqual([], compressor.lines)

    def test_one_nosha_delta(self):
        # diff against NULL
        compressor = groupcompress.GroupCompressor(True)
        sha1, end_point = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        self.assertEqual(sha_strings(['strange\n', 'common\n']), sha1)
        expected_lines = [
            'label: label\n',
            'sha1: %s\n' % sha1,
            'i,3\n',
            'strange\n',
            'common\n',
            '\n', # the last \n in a text is removed, which allows safe
                  # serialisation of lines without trailing \n.
            ]
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)
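
        # Layout of a group entry, as exercised above: a 'label:' header and
        # a 'sha1:' header, followed by delta instructions.  'i,N' inserts
        # the next N literal lines, and the bare '\n' line stands in for the
        # newline stripped from the end of the text, so end_point is simply
        # the total byte length of everything serialised so far.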

    def test_two_nosha_delta(self):
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, _ = compressor.compress(('label',),
            ['strange\n', 'common long line\n'], None)
        expected_lines = list(compressor.lines)
        sha1_2, end_point = compressor.compress(('newlabel',),
            ['common long line\n', 'different\n'], None)
        self.assertEqual(sha_strings(['common long line\n', 'different\n']),
                         sha1_2)
        expected_lines.extend([
            'label: newlabel\n',
            'sha1: %s\n' % sha1_2,
            # copy the line common
            'c,72,17\n',
            # add the line different, and the trailing newline
            'i,2\n',
            'different\n',
            '\n'
            ])
        self.assertEqualDiff(''.join(expected_lines), ''.join(compressor.lines))
        self.assertEqual(sum(map(len, expected_lines)), end_point)
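
        # A sketch of where 'c,72,17' comes from, assuming copy offsets count
        # bytes from the start of the group and the first entry is serialised
        # like the one in test_one_nosha_delta: 'label: label\n' (13 bytes),
        # 'sha1: <40 hex digits>\n' (47), 'i,3\n' (4) and 'strange\n' (8) put
        # 'common long line\n' (17 bytes) at offset 72, so the second entry
        # copies those 17 bytes instead of inserting the line again.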

    def test_three_nosha_delta(self):
        # The first interesting test: make a change that should use lines from
        # both parents.
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, end_point = compressor.compress(('label',),
            ['strange\n', 'common long line\n'], None)
        sha1_2, _ = compressor.compress(('newlabel',),
            ['common long line\n', 'different\n', 'moredifferent\n'], None)
        expected_lines = list(compressor.lines)
        sha1_3, end_point = compressor.compress(('label3',),
            ['new\n', 'common long line\n', 'different\n', 'moredifferent\n'],
            None)
        self.assertEqual(
            sha_strings(['new\n', 'common long line\n', 'different\n',
                         'moredifferent\n']),
            sha1_3)
        expected_lines.extend([
            'label: label3\n',
            'sha1: %s\n' % sha1_3,
            # insert new
            'i,1\n',
            'new\n',
            # copy the line common
            'c,72,17\n',
            # copy the lines different, moredifferent and trailing newline
            'c,165,25\n',
            ])
        self.assertEqualDiff(''.join(expected_lines),
                             ''.join(compressor.lines))
        self.assertEqual(sum(map(len, expected_lines)), end_point)
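
        # The 'c,165,25' copy can be derived the same way, assuming the second
        # entry was serialised as a copy of 'common long line\n' plus an
        # 'i,3' insert: the first entry takes 90 bytes, the second entry's
        # headers and instructions another 75, so 'different\n' (10 bytes),
        # 'moredifferent\n' (14) and the trailing '\n' (1) start at offset
        # 165 and make up the 25 copied bytes.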

    def test_stats(self):
        compressor = groupcompress.GroupCompressor(True)
        compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        compressor.compress(('label3',),
            ['new\n', 'common\n', 'different\n', 'moredifferent\n'], None)
        self.assertAlmostEqual(0.3, compressor.ratio(), 1)

    def test_extract_from_compressor(self):
        # Knit fetching will try to reconstruct texts locally which results in
        # reading something that is in the compressor stream already.
        compressor = groupcompress.GroupCompressor(True)
        sha_1, _ = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        sha_2, _ = compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        # get the first out
        self.assertEqual((['strange\n', 'common\n'], sha_1),
            compressor.extract(('label',)))
        # and the second
        self.assertEqual((['common\n', 'different\n', 'moredifferent\n'],
            sha_2), compressor.extract(('newlabel',)))