
To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

# groupcompress, a bzr plugin providing new compression logic.
# Copyright (C) 2008 Canonical Limited.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
#

"""Tests for group compression."""

import zlib

from bzrlib import tests
from bzrlib.osutils import sha_strings
from bzrlib.plugins.groupcompress import errors, groupcompress
from bzrlib.tests import (
    TestCaseWithTransport,
    TestScenarioApplier,
    adapt_tests,
    )
from bzrlib.transport import get_transport


def load_tests(standard_tests, module, loader):
    from bzrlib.tests.test_versionedfile import TestVersionedFiles
    vf_interface_tests = loader.loadTestsFromTestCase(TestVersionedFiles)
    cleanup_pack_group = groupcompress.cleanup_pack_group
    make_pack_factory = groupcompress.make_pack_factory
    group_scenario = ('groupcompress-nograph', {
            'cleanup': cleanup_pack_group,
            'factory': make_pack_factory(False, False, 1),
            'graph': False,
            'key_length': 1,
            }
        )
    applier = TestScenarioApplier()
    applier.scenarios = [group_scenario]
    adapt_tests(vf_interface_tests, applier, standard_tests)
    return standard_tests
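
# The hook above runs the whole TestVersionedFiles interface suite against a
# groupcompress-backed store built by make_pack_factory(False, False, 1).
# Only 'graph': False and 'key_length': 1 are named in the scenario itself;
# what each positional argument of make_pack_factory controls is not spelled
# out in this file, so any reading beyond "no graph, single-element keys" is
# an assumption.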


class TestGroupCompressor(TestCaseWithTransport):
    """Tests for GroupCompressor"""

    def test_empty_delta(self):
        compressor = groupcompress.GroupCompressor(True)
        self.assertEqual([], compressor.lines)

    def test_one_nosha_delta(self):
        # diff against NULL
        compressor = groupcompress.GroupCompressor(True)
        sha1, end_point = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        self.assertEqual(sha_strings(['strange\n', 'common\n']), sha1)
        expected_lines = [
            'label: label\n',
            'sha1: %s\n' % sha1,
            'i,3\n',
            'strange\n',
            'common\n',
            '\n', # the last \n in a text is removed, which allows safe
                  # serialisation of lines without trailing \n.
            ]
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)
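        # The group is a flat list of lines: a 'label:' header, a 'sha1:'
        # header, then encoding instructions.  'i,3' inserts the next three
        # literal lines (the two text lines plus the sentinel '\n' noted
        # above), and end_point is the byte length of everything emitted so
        # far.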

    def test_two_nosha_delta(self):
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, _ = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        expected_lines = list(compressor.lines)
        sha1_2, end_point = compressor.compress(('newlabel',),
            ['common\n', 'different\n'], None)
        self.assertEqual(sha_strings(['common\n', 'different\n']), sha1_2)
        expected_lines.extend([
            'label: newlabel\n',
            'sha1: %s\n' % sha1_2,
            # copy the line common
            'c,4,1\n',
            # add the line different
            'i,1\n',
            'different\n',
            # copy the line \n. Note that when we filter on encoding-overhead
            # this will become a fresh insert instead
            'c,5,1\n',
            ])
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)
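        # 'c,4,1' and 'c,5,1' read as copy instructions: one line starting at
        # line offset 4 ('common\n') and one at offset 5 (the sentinel '\n')
        # of the group built so far, so the second text reuses lines already
        # stored for the first.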

    def test_three_nosha_delta(self):
        # The first interesting test: make a change that should use lines from
        # both parents.
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, end_point = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        sha1_2, _ = compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        expected_lines = list(compressor.lines)
        sha1_3, end_point = compressor.compress(('label3',),
            ['new\n', 'common\n', 'different\n', 'moredifferent\n'], None)
        self.assertEqual(
            sha_strings(['new\n', 'common\n', 'different\n', 'moredifferent\n']),
            sha1_3)
        expected_lines.extend([
            'label: label3\n',
            'sha1: %s\n' % sha1_3,
            # insert new
            'i,1\n',
            'new\n',
            # copy the line common
            'c,4,1\n',
            # copy the lines different, moredifferent
            'c,10,2\n',
            # copy the line \n. Note that when we filter on encoding-overhead
            # this will become a fresh insert instead
            'c,5,1\n',
            ])
        self.assertEqualDiff(''.join(expected_lines), ''.join(compressor.lines))
        self.assertEqual(sum(map(len, expected_lines)), end_point)
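        # 'c,4,1' pulls 'common\n' from the first text while 'c,10,2' pulls
        # 'different\n' and 'moredifferent\n' from the second, so a single
        # delta can draw on every text already in the group rather than one
        # fixed basis.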

    def test_stats(self):
        compressor = groupcompress.GroupCompressor(True)
        compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        compressor.compress(('label3',),
            ['new\n', 'common\n', 'different\n', 'moredifferent\n'], None)
        self.assertAlmostEqual(0.3, compressor.ratio(), 1)
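        # ratio() is only pinned to one decimal place: roughly 0.3 for these
        # three overlapping texts.  How the statistic is computed is not
        # asserted here.


# A minimal sketch, not part of the plugin, of how the instruction stream
# exercised above could be expanded again.  It assumes exactly the layout the
# tests spell out: 'label:' and 'sha1:' headers, 'i,<n>' meaning "insert the
# next <n> literal lines", and 'c,<offset>,<n>' meaning "copy <n> lines
# starting at line <offset> of the group built so far".  The function name
# and the trailing-newline handling are illustrative assumptions only.
def expand_record(group_lines, start):
    """Return (label, sha1, text, next_start) for the record at ``start``."""
    label = group_lines[start].split(': ', 1)[1].rstrip('\n')
    sha1 = group_lines[start + 1].split(': ', 1)[1].rstrip('\n')
    out = []
    pos = start + 2
    while pos < len(group_lines):
        instruction = group_lines[pos]
        if instruction.startswith('i,'):
            # insert the next <count> literal lines from the group
            count = int(instruction[2:])
            out.extend(group_lines[pos + 1:pos + 1 + count])
            pos += 1 + count
        elif instruction.startswith('c,'):
            # copy <count> lines starting at line <offset> of the group
            offset, count = [int(v) for v in instruction[2:].split(',')]
            out.extend(group_lines[offset:offset + count])
            pos += 1
        else:
            # anything else is the next record's 'label:' header
            break
    text = ''.join(out)
    # the serialised form carries one extra trailing '\n' (see the comment in
    # test_one_nosha_delta), so strip exactly one newline to recover the text
    if text.endswith('\n'):
        text = text[:-1]
    return label, sha1, text, pos

# For example, expand_record(compressor.lines, 0) on the group built in
# test_two_nosha_delta should give back ('label', sha1_1, 'strange\ncommon\n', 6),
# and calling it again with start=6 should give back the second text.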