# Copyright (C) 2007-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for repository write groups."""

import sys

from bzrlib import (
    branch,
    controldir,
    errors,
    memorytree,
    remote,
    tests,
    versionedfile,
    )
from bzrlib.tests.per_repository_vf import (
    TestCaseWithRepository,
    all_repository_vf_format_scenarios,
    )
from bzrlib.tests.scenarios import load_tests_apply_scenarios


load_tests = load_tests_apply_scenarios


class TestGetMissingParentInventories(TestCaseWithRepository):
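    """Tests for get_missing_parent_inventories and stream insertion."""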

    scenarios = all_repository_vf_format_scenarios()

    def test_empty_get_missing_parent_inventories(self):
        """A new write group has no missing parent inventories."""
        repo = self.make_repository('.')
        repo.lock_write()
        repo.start_write_group()
        try:
            self.assertEqual(set(), set(repo.get_missing_parent_inventories()))
        finally:
            repo.commit_write_group()
            repo.unlock()

    def branch_trunk_and_make_tree(self, trunk_repo, relpath):
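        """Branch trunk_repo into a memory tree parented on 'rev-1'."""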
        tree = self.make_branch_and_memory_tree('branch')
        trunk_repo.lock_read()
        self.addCleanup(trunk_repo.unlock)
        tree.branch.repository.fetch(trunk_repo, revision_id='rev-1')
        tree.set_parent_ids(['rev-1'])
        return tree

    def make_first_commit(self, repo):
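        """Create a branch in repo and commit 'rev-0' and 'rev-1' to it."""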
        trunk = repo.bzrdir.create_branch()
        tree = memorytree.MemoryTree.create_on_branch(trunk)
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'], ['directory'])
        tree.add(['dir'], ['dir-id'], ['directory'])
        tree.add(['filename'], ['file-id'], ['file'])
        tree.put_file_bytes_non_atomic('file-id', 'content\n')
        tree.commit('Trunk commit', rev_id='rev-0')
        tree.commit('Trunk commit', rev_id='rev-1')
        tree.unlock()

    def make_new_commit_in_new_repo(self, trunk_repo, parents=None):
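        """Branch the trunk and commit 'rev-2' with the given parents.

        Returns the new branch's repository, read-locked.
        """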
        tree = self.branch_trunk_and_make_tree(trunk_repo, 'branch')
        tree.set_parent_ids(parents)
        tree.commit('Branch commit', rev_id='rev-2')
        branch_repo = tree.branch.repository
        branch_repo.lock_read()
        self.addCleanup(branch_repo.unlock)
        return branch_repo

    def make_stackable_repo(self, relpath='trunk'):
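        """Make a repository in a format that supports stacking."""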
        if isinstance(self.repository_format, remote.RemoteRepositoryFormat):
            # RemoteRepository by default builds a default format real
            # repository, but the default format is unstackable.  So explicitly
            # make a stackable real repository and use that.
            repo = self.make_repository(relpath, format='1.9')
            dir = controldir.ControlDir.open(self.get_url(relpath))
            repo = dir.open_repository()
        else:
            repo = self.make_repository(relpath)
        if not repo._format.supports_external_lookups:
            raise tests.TestNotApplicable('format not stackable')
        repo.bzrdir._format.set_branch_format(branch.BzrBranchFormat7())
        return repo

    def reopen_repo_and_resume_write_group(self, repo):
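        """Suspend repo's write group, reopen the repository and resume it.

        Returns the reopened repository, or None if the format cannot
        suspend write groups.
        """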
        try:
            resume_tokens = repo.suspend_write_group()
        except errors.UnsuspendableWriteGroup:
            # If we got this far, and this repo does not support resuming write
            # groups, then get_missing_parent_inventories works in all
            # cases this repo supports.
            repo.unlock()
            return
        repo.unlock()
        reopened_repo = repo.bzrdir.open_repository()
        reopened_repo.lock_write()
        self.addCleanup(reopened_repo.unlock)
        reopened_repo.resume_write_group(resume_tokens)
        return reopened_repo

    def test_ghost_revision(self):
        """A parent inventory may be absent if all the needed texts are present.
        i.e., a ghost revision isn't (necessarily) considered to be a missing
        parent inventory.
        """
        # Make a trunk with one commit.
        trunk_repo = self.make_stackable_repo()
        self.make_first_commit(trunk_repo)
        trunk_repo.lock_read()
        self.addCleanup(trunk_repo.unlock)
        # Branch the trunk, add a new commit.
        branch_repo = self.make_new_commit_in_new_repo(
            trunk_repo, parents=['rev-1', 'ghost-rev'])
        inv = branch_repo.get_inventory('rev-2')
        # Make a new repo stacked on trunk, and then copy into it:
        #  - all texts in rev-2
        #  - the new inventory (rev-2)
        #  - the new revision (rev-2)
        repo = self.make_stackable_repo('stacked')
        repo.lock_write()
        repo.start_write_group()
        # Add all texts in the rev-2 inventory.  Note that this has to exclude
        # the root if the repo format does not support rich roots.
        rich_root = branch_repo._format.rich_root_data
        all_texts = [
            (ie.file_id, ie.revision) for ie in inv.iter_just_entries()
             if rich_root or inv.id2path(ie.file_id) != '']
        repo.texts.insert_record_stream(
            branch_repo.texts.get_record_stream(all_texts, 'unordered', False))
        # Add inventory and revision for rev-2.
        repo.add_inventory('rev-2', inv, ['rev-1', 'ghost-rev'])
        repo.revisions.insert_record_stream(
            branch_repo.revisions.get_record_stream(
                [('rev-2',)], 'unordered', False))
        # Now, no inventories are reported as missing, even though there is a
        # ghost.
        self.assertEqual(set(), repo.get_missing_parent_inventories())
        # Resuming the write group does not affect
        # get_missing_parent_inventories.
        reopened_repo = self.reopen_repo_and_resume_write_group(repo)
        self.assertEqual(set(), reopened_repo.get_missing_parent_inventories())
        reopened_repo.abort_write_group()

    def test_get_missing_parent_inventories(self):
        """A stacked repo with a single revision and inventory (no parent
        inventory) in it must have all the texts in its inventory (even if not
        changed w.r.t. the absent parent), otherwise it will report missing
        texts/parent inventory.

        The core of this test is that a file was changed in rev-1, but a
        stacked repo that only has rev-2 still needs that file's text.
        """
        # Make a trunk with one commit.
        trunk_repo = self.make_stackable_repo()
        self.make_first_commit(trunk_repo)
        trunk_repo.lock_read()
        self.addCleanup(trunk_repo.unlock)
        # Branch the trunk, add a new commit.
        branch_repo = self.make_new_commit_in_new_repo(
            trunk_repo, parents=['rev-1'])
        inv = branch_repo.get_inventory('rev-2')
        # Make a new repo stacked on trunk, and copy the new commit's revision
        # and inventory records to it.
        repo = self.make_stackable_repo('stacked')
        repo.lock_write()
        repo.start_write_group()
        # Insert a single fulltext inv (using add_inventory because it's
        # simpler than insert_record_stream)
        repo.add_inventory('rev-2', inv, ['rev-1'])
        repo.revisions.insert_record_stream(
            branch_repo.revisions.get_record_stream(
                [('rev-2',)], 'unordered', False))
        # There should be no missing compression parents
        self.assertEqual(set(),
                repo.inventories.get_missing_compression_parent_keys())
        self.assertEqual(
            set([('inventories', 'rev-1')]),
            repo.get_missing_parent_inventories())
        # Resuming the write group does not affect
        # get_missing_parent_inventories.
        reopened_repo = self.reopen_repo_and_resume_write_group(repo)
        self.assertEqual(
            set([('inventories', 'rev-1')]),
            reopened_repo.get_missing_parent_inventories())
        # Adding the parent inventory satisfies get_missing_parent_inventories.
        reopened_repo.inventories.insert_record_stream(
            branch_repo.inventories.get_record_stream(
                [('rev-1',)], 'unordered', False))
        self.assertEqual(
            set(), reopened_repo.get_missing_parent_inventories())
        reopened_repo.abort_write_group()

    def test_get_missing_parent_inventories_check(self):
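        """check_for_missing_texts controls whether a ghost parent counts.

        With check_for_missing_texts=False the ghost parent's inventory is
        reported as missing; with the default text check it is not, because
        all the texts needed by the new revision are present.  Formats
        without external lookups always report an empty set.
        """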
        builder = self.make_branch_builder('test')
        builder.build_snapshot('A-id', ['ghost-parent-id'], [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n'))],
            allow_leftmost_as_ghost=True)
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        repo = self.make_repository('test-repo')
        repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        self.addCleanup(repo.abort_write_group)
        # Now, add the objects manually
        text_keys = [('file-id', 'A-id')]
        if repo.supports_rich_root():
            text_keys.append(('root-id', 'A-id'))
        # Directly add the texts, inventory, and revision object for 'A-id'
        repo.texts.insert_record_stream(b.repository.texts.get_record_stream(
            text_keys, 'unordered', True))
        repo.add_revision('A-id', b.repository.get_revision('A-id'),
                          b.repository.get_inventory('A-id'))
        get_missing = repo.get_missing_parent_inventories
        if repo._format.supports_external_lookups:
            self.assertEqual(set([('inventories', 'ghost-parent-id')]),
                get_missing(check_for_missing_texts=False))
            self.assertEqual(set(), get_missing(check_for_missing_texts=True))
            self.assertEqual(set(), get_missing())
        else:
            # If we don't support external lookups, we always return empty
            self.assertEqual(set(), get_missing(check_for_missing_texts=False))
            self.assertEqual(set(), get_missing(check_for_missing_texts=True))
            self.assertEqual(set(), get_missing())

    def test_insert_stream_passes_resume_info(self):
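        """The stream sink asks for the full missing-text check only when it
        is given resume tokens.
        """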
        repo = self.make_repository('test-repo')
        if (not repo._format.supports_external_lookups or
            isinstance(repo, remote.RemoteRepository)):
            raise tests.TestNotApplicable(
                'only valid for direct connections to resumable repos')
        # log calls to get_missing_parent_inventories, so that we can assert it
        # is called with the correct parameters
        call_log = []
        orig = repo.get_missing_parent_inventories
        def get_missing(check_for_missing_texts=True):
            call_log.append(check_for_missing_texts)
            return orig(check_for_missing_texts=check_for_missing_texts)
        repo.get_missing_parent_inventories = get_missing
        repo.lock_write()
        self.addCleanup(repo.unlock)
        sink = repo._get_sink()
        sink.insert_stream((), repo._format, [])
        self.assertEqual([False], call_log)
        del call_log[:]
        repo.start_write_group()
        # We need to insert something, or suspend_write_group won't actually
        # create a token
        repo.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('file-id', 'rev-id'), (), None, 'lines\n')])
        tokens = repo.suspend_write_group()
        self.assertNotEqual([], tokens)
        sink.insert_stream((), repo._format, tokens)
        self.assertEqual([True], call_log)

    def test_insert_stream_without_locking_fails_without_lock(self):
        repo = self.make_repository('test-repo')
        sink = repo._get_sink()
        stream = [('texts', [versionedfile.FulltextContentFactory(
            ('file-id', 'rev-id'), (), None, 'lines\n')])]
        self.assertRaises(errors.ObjectNotLocked,
            sink.insert_stream_without_locking, stream, repo._format)

    def test_insert_stream_without_locking_fails_without_write_group(self):
        repo = self.make_repository('test-repo')
        self.addCleanup(repo.lock_write().unlock)
        sink = repo._get_sink()
        stream = [('texts', [versionedfile.FulltextContentFactory(
            ('file-id', 'rev-id'), (), None, 'lines\n')])]
        self.assertRaises(errors.BzrError,
            sink.insert_stream_without_locking, stream, repo._format)

    def test_insert_stream_without_locking(self):
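        """insert_stream_without_locking works inside an active write group
        and reports no missing keys.
        """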
        repo = self.make_repository('test-repo')
        self.addCleanup(repo.lock_write().unlock)
        repo.start_write_group()
        sink = repo._get_sink()
        stream = [('texts', [versionedfile.FulltextContentFactory(
            ('file-id', 'rev-id'), (), None, 'lines\n')])]
        missing_keys = sink.insert_stream_without_locking(stream, repo._format)
        repo.commit_write_group()
        self.assertEqual(set(), missing_keys)


class TestResumeableWriteGroup(TestCaseWithRepository):
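    """Tests for suspending and resuming repository write groups."""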

    scenarios = all_repository_vf_format_scenarios()

    def make_write_locked_repo(self, relpath='repo'):
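        """Make a repository at relpath, write-locked for the whole test."""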
        repo = self.make_repository(relpath)
        repo.lock_write()
        self.addCleanup(repo.unlock)
        return repo

    def reopen_repo(self, repo):
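        """Open a second, write-locked instance of the same repository."""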
        same_repo = repo.bzrdir.open_repository()
        same_repo.lock_write()
        self.addCleanup(same_repo.unlock)
        return same_repo

    def require_suspendable_write_groups(self, reason):
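        """Raise TestNotApplicable if this format cannot suspend write groups."""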
        repo = self.make_repository('__suspend_test')
        repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        try:
            wg_tokens = repo.suspend_write_group()
        except errors.UnsuspendableWriteGroup:
            repo.abort_write_group()
            raise tests.TestNotApplicable(reason)

    def test_suspend_write_group(self):
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        repo.texts.add_lines(('file-id', 'revid'), (), ['lines'])
        try:
            wg_tokens = repo.suspend_write_group()
        except errors.UnsuspendableWriteGroup:
            # The contract for repos that don't support suspending write groups
            # is that suspend_write_group raises UnsuspendableWriteGroup, but
            # is otherwise a no-op.  So we can still e.g. abort the write group
            # as usual.
            self.assertTrue(repo.is_in_write_group())
            repo.abort_write_group()
        else:
            # After suspending a write group we are no longer in a write group
            self.assertFalse(repo.is_in_write_group())
            # suspend_write_group returns a list of tokens, which are strs.  If
            # no other write groups were resumed, there will only be one token.
            self.assertEqual(1, len(wg_tokens))
            self.assertIsInstance(wg_tokens[0], str)
            # See also test_pack_repository's test of the same name.

    def test_resume_write_group_then_abort(self):
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        try:
            wg_tokens = repo.suspend_write_group()
        except errors.UnsuspendableWriteGroup:
            # If the repo does not support suspending write groups, it doesn't
            # support resuming them either.
            repo.abort_write_group()
            self.assertRaises(
                errors.UnsuspendableWriteGroup, repo.resume_write_group, [])
        else:
            #self.assertEqual([], list(repo.texts.keys()))
            same_repo = self.reopen_repo(repo)
            same_repo.resume_write_group(wg_tokens)
            self.assertEqual([text_key], list(same_repo.texts.keys()))
            self.assertTrue(same_repo.is_in_write_group())
            same_repo.abort_write_group()
            self.assertEqual([], list(repo.texts.keys()))
            # See also test_pack_repository's test of the same name.

    def test_multiple_resume_write_group(self):
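        """A resumed write group can be suspended again; the second suspend
        returns tokens covering both the original and the newly added data.
        """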
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        first_key = ('file-id', 'revid')
        repo.texts.add_lines(first_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        self.assertTrue(same_repo.is_in_write_group())
        second_key = ('file-id', 'second-revid')
        same_repo.texts.add_lines(second_key, (first_key,), ['more lines'])
        try:
            new_wg_tokens = same_repo.suspend_write_group()
        except:
            e = sys.exc_info()
            same_repo.abort_write_group(suppress_errors=True)
            raise e[0], e[1], e[2]
        self.assertEqual(2, len(new_wg_tokens))
        self.assertSubset(wg_tokens, new_wg_tokens)
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(new_wg_tokens)
        both_keys = set([first_key, second_key])
        self.assertEqual(both_keys, same_repo.texts.keys())
        same_repo.abort_write_group()

    def test_no_op_suspend_resume(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        new_wg_tokens = same_repo.suspend_write_group()
        self.assertEqual(wg_tokens, new_wg_tokens)
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        self.assertEqual([text_key], list(same_repo.texts.keys()))
        same_repo.abort_write_group()

    def test_read_after_suspend_fails(self):
        self.require_suspendable_write_groups(
            'Cannot test suspend on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        self.assertEqual([], list(repo.texts.keys()))

    def test_read_after_second_suspend_fails(self):
        self.require_suspendable_write_groups(
            'Cannot test suspend on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        same_repo.suspend_write_group()
        self.assertEqual([], list(same_repo.texts.keys()))

    def test_read_after_resume_abort_fails(self):
        self.require_suspendable_write_groups(
            'Cannot test suspend on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        same_repo.abort_write_group()
        self.assertEqual([], list(same_repo.texts.keys()))

    def test_cannot_resume_aborted_write_group(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        same_repo.abort_write_group()
        same_repo = self.reopen_repo(repo)
        self.assertRaises(
            errors.UnresumableWriteGroup, same_repo.resume_write_group,
            wg_tokens)

    def test_commit_resumed_write_group_no_new_data(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        same_repo.commit_write_group()
        self.assertEqual([text_key], list(same_repo.texts.keys()))
        self.assertEqual(
            'lines', same_repo.texts.get_record_stream([text_key],
                'unordered', True).next().get_bytes_as('fulltext'))
        self.assertRaises(
            errors.UnresumableWriteGroup, same_repo.resume_write_group,
            wg_tokens)

    def test_commit_resumed_write_group_plus_new_data(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        first_key = ('file-id', 'revid')
        repo.texts.add_lines(first_key, (), ['lines'])
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        second_key = ('file-id', 'second-revid')
        same_repo.texts.add_lines(second_key, (first_key,), ['more lines'])
        same_repo.commit_write_group()
        self.assertEqual(
            set([first_key, second_key]), set(same_repo.texts.keys()))
        self.assertEqual(
            'lines', same_repo.texts.get_record_stream([first_key],
                'unordered', True).next().get_bytes_as('fulltext'))
        self.assertEqual(
            'more lines', same_repo.texts.get_record_stream([second_key],
                'unordered', True).next().get_bytes_as('fulltext'))

    def make_source_with_delta_record(self):
        # Make a source repository with a delta record in it.
        source_repo = self.make_write_locked_repo('source')
        source_repo.start_write_group()
        key_base = ('file-id', 'base')
        key_delta = ('file-id', 'delta')
        def text_stream():
            yield versionedfile.FulltextContentFactory(
                key_base, (), None, 'lines\n')
            yield versionedfile.FulltextContentFactory(
                key_delta, (key_base,), None, 'more\nlines\n')
        source_repo.texts.insert_record_stream(text_stream())
        source_repo.commit_write_group()
        return source_repo

    def test_commit_resumed_write_group_with_missing_parents(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        source_repo = self.make_source_with_delta_record()
        key_base = ('file-id', 'base')
        key_delta = ('file-id', 'delta')
        # Start a write group, insert just a delta.
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        stream = source_repo.texts.get_record_stream(
            [key_delta], 'unordered', False)
        repo.texts.insert_record_stream(stream)
        # It's either not committable due to the missing compression parent, or
        # the stacked location has already filled in the fulltext.
        try:
            repo.commit_write_group()
        except errors.BzrCheckError:
            # It refused to commit because we have a missing parent
            pass
        else:
            same_repo = self.reopen_repo(repo)
            same_repo.lock_read()
            record = same_repo.texts.get_record_stream([key_delta],
                                                       'unordered', True).next()
            self.assertEqual('more\nlines\n', record.get_bytes_as('fulltext'))
            return
        # Merely suspending and resuming doesn't make it committable either.
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        self.assertRaises(
            errors.BzrCheckError, same_repo.commit_write_group)
        same_repo.abort_write_group()

    def test_commit_resumed_write_group_adding_missing_parents(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        source_repo = self.make_source_with_delta_record()
        key_base = ('file-id', 'base')
        key_delta = ('file-id', 'delta')
        # Start a write group.
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        # Add some content so this isn't an empty write group (which may return
        # 0 tokens)
        text_key = ('file-id', 'revid')
        repo.texts.add_lines(text_key, (), ['lines'])
        # Suspend it, then resume it.
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        # Add a record with a missing compression parent
        stream = source_repo.texts.get_record_stream(
            [key_delta], 'unordered', False)
        same_repo.texts.insert_record_stream(stream)
        # Just like if we'd added that record without a suspend/resume cycle,
        # commit_write_group fails.
        try:
            same_repo.commit_write_group()
        except errors.BzrCheckError:
            pass
        else:
            # If the commit_write_group didn't fail, that is because the
            # insert_record_stream already gave it a fulltext.
            same_repo = self.reopen_repo(repo)
            same_repo.lock_read()
            record = same_repo.texts.get_record_stream([key_delta],
                                                       'unordered', True).next()
            self.assertEqual('more\nlines\n', record.get_bytes_as('fulltext'))
            return
        same_repo.abort_write_group()

    def test_add_missing_parent_after_resume(self):
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        source_repo = self.make_source_with_delta_record()
        key_base = ('file-id', 'base')
        key_delta = ('file-id', 'delta')
        # Start a write group, insert just a delta.
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        stream = source_repo.texts.get_record_stream(
            [key_delta], 'unordered', False)
        repo.texts.insert_record_stream(stream)
        # Suspend it, then resume it.
        wg_tokens = repo.suspend_write_group()
        same_repo = self.reopen_repo(repo)
        same_repo.resume_write_group(wg_tokens)
        # Fill in the missing compression parent.
        stream = source_repo.texts.get_record_stream(
            [key_base], 'unordered', False)
        same_repo.texts.insert_record_stream(stream)
        same_repo.commit_write_group()

    def test_suspend_empty_initial_write_group(self):
        """Suspending a write group with no writes returns an empty token
        list.
        """
        self.require_suspendable_write_groups(
            'Cannot test suspend on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.start_write_group()
        wg_tokens = repo.suspend_write_group()
        self.assertEqual([], wg_tokens)

    def test_resume_empty_initial_write_group(self):
        """Resuming an empty token list is equivalent to start_write_group."""
        self.require_suspendable_write_groups(
            'Cannot test resume on repo that does not support suspending')
        repo = self.make_write_locked_repo()
        repo.resume_write_group([])
        repo.abort_write_group()