# Branch: brz/remove-bazaar
# To get this branch, use:
#   bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar
# (History note: revision 0.17.1 by Robert Collins — "Starting point.
#  Interface tests hooked up and failing.")
# groupcompress, a bzr plugin providing new compression logic.
# Copyright (C) 2008 Canonical Limited.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
#

"""Tests for group compression."""

import zlib

from bzrlib import tests
from bzrlib.osutils import sha_strings
from bzrlib.plugins.groupcompress import errors, groupcompress
from bzrlib.tests import (
    TestCaseWithTransport,
    TestScenarioApplier,
    adapt_tests,
    )
from bzrlib.transport import get_transport

33
def load_tests(standard_tests, module, loader):
    """unittest ``load_tests`` hook: adapt the generic VersionedFiles
    interface tests so they also run against the groupcompress store.

    :param standard_tests: the tests collected from this module.
    :param module: this module (unused beyond the protocol signature).
    :param loader: the TestLoader collecting the tests.
    :return: ``standard_tests`` extended with the adapted interface tests.
    """
    # Imported here to avoid a hard dependency at module import time.
    from bzrlib.tests.test_versionedfile import TestVersionedFiles
    vf_interface_tests = loader.loadTestsFromTestCase(TestVersionedFiles)
    cleanup_pack_group = groupcompress.cleanup_pack_group
    make_pack_factory = groupcompress.make_pack_factory
    # Single scenario: a groupcompress pack store without a graph, with
    # single-element keys.
    group_scenario = ('groupcompress-nograph', {
            'cleanup':cleanup_pack_group,
            'factory':make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            }
        )
    applier = TestScenarioApplier()
    applier.scenarios = [group_scenario]
    adapt_tests(vf_interface_tests, applier, standard_tests)
    return standard_tests


class TestGroupCompressor(TestCaseWithTransport):
    """Tests for GroupCompressor"""

    def test_empty_delta(self):
        """A fresh compressor has produced no output lines yet."""
        compressor = groupcompress.GroupCompressor(True)
        self.assertEqual([], compressor.lines)

    def test_one_nosha_delta(self):
        """Compressing the first text emits a full insert record."""
        # diff against NUKK
        compressor = groupcompress.GroupCompressor(True)
        sha1, end_point = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        self.assertEqual(sha_strings(['strange\n', 'common\n']), sha1)
        expected_lines = [
            'label: label\n',
            'sha1: %s\n' % sha1,
            'i,3\n',
            'strange\n',
            'common\n',
            '\n', # the last \n in a text is removed, which allows safe
            # serialisation of lines without trailing \n.
            ]
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_two_nosha_delta(self):
        """The second text is delta-compressed against the first."""
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, _ = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        expected_lines = list(compressor.lines)
        sha1_2, end_point = compressor.compress(('newlabel',),
            ['common\n', 'different\n'], None)
        self.assertEqual(sha_strings(['common\n', 'different\n']), sha1_2)
        expected_lines.extend([
            'label: newlabel\n',
            'sha1: %s\n' % sha1_2,
            # copy the line common
            'c,72,7\n',
            # add the line different
            'i,1\n',
            'different\n',
            # Insert the trailing newline.
            'i,1\n',
            '\n'
            ])
        self.assertEqual(expected_lines, compressor.lines)
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_three_nosha_delta(self):
        """A third text can copy ranges from both earlier texts."""
        # The first interesting test: make a change that should use lines from
        # both parents.
        compressor = groupcompress.GroupCompressor(True)
        sha1_1, end_point = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        sha1_2, _ = compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        expected_lines = list(compressor.lines)
        sha1_3, end_point = compressor.compress(('label3',),
            ['new\n', 'common\n', 'different\n', 'moredifferent\n'], None)
        self.assertEqual(
            sha_strings(['new\n', 'common\n', 'different\n', 'moredifferent\n']),
            sha1_3)
        expected_lines.extend([
            'label: label3\n',
            'sha1: %s\n' % sha1_3,
            # insert new
            'i,1\n',
            'new\n',
            # copy the line common
            'c,72,7\n',
            # copy the lines different, moredifferent
            'c,154,24\n',
            # Insert the trailing newline.
            'i,1\n',
            '\n'
            ])
        self.assertEqualDiff(''.join(expected_lines),
                             ''.join(compressor.lines))
        self.assertEqual(sum(map(len, expected_lines)), end_point)

    def test_stats(self):
        """The compression ratio reflects the accumulated deltas."""
        compressor = groupcompress.GroupCompressor(True)
        compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        compressor.compress(('label3',),
            ['new\n', 'common\n', 'different\n', 'moredifferent\n'], None)
        self.assertAlmostEqual(0.3, compressor.ratio(), 1)

    def test_extract_from_compressor(self):
        """Texts already inserted can be read back out of the stream."""
        # Knit fetching will try to reconstruct texts locally which results in
        # reading something that is in the compressor stream already.
        compressor = groupcompress.GroupCompressor(True)
        sha_1,  _ = compressor.compress(('label',),
            ['strange\n', 'common\n'], None)
        sha_2, _ = compressor.compress(('newlabel',),
            ['common\n', 'different\n', 'moredifferent\n'], None)
        # get the first out
        self.assertEqual((['strange\n', 'common\n'], sha_1),
            compressor.extract(('label',)))
        # and the second
        self.assertEqual((['common\n', 'different\n', 'moredifferent\n'],
            sha_2), compressor.extract(('newlabel',)))