# Copyright (C) 2006-2012, 2016 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for the Repository facility that are not interface tests.

For interface tests see tests/per_repository/*.py.

For concrete class tests see this file, and for storage formats tests
also see this file.
"""

from stat import S_ISDIR

import breezy
import breezy.ui
from breezy.errors import (
    UnknownFormatError,
    UnsupportedFormatError,
    )
from breezy import (
    controldir,
    errors,
    osutils,
    repository,
    revision as _mod_revision,
    tests,
    transport,
    upgrade,
    workingtree,
    )
from breezy.bzr import (
    btree_index,
    bzrdir,
    groupcompress_repo,
    inventory,
    knitpack_repo,
    knitrepo,
    pack_repo,
    repository as bzrrepository,
    versionedfile,
    vf_repository,
    vf_search,
    )
from breezy.bzr.btree_index import BTreeBuilder, BTreeGraphIndex
from breezy.bzr.index import GraphIndex
from breezy.repository import RepositoryFormat
from breezy.tests import (
    TestCase,
    TestCaseWithTransport,
    )


class TestDefaultFormat(TestCase):

    def test_get_set_default_format(self):
        old_default = controldir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.format_registry.get_default()
        self.assertTrue(isinstance(old_format, private_default))

        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
            return my_bzrdir

        controldir.format_registry.remove('default')
        controldir.format_registry.register('sample', make_sample_bzrdir, '')
        controldir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        try:
            # the default branch format is used by the meta dir format
            # which is not the default bzrdir format at this point
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
            result = dir.create_repository()
            self.assertEqual(result, 'A bzr repository dir')
        finally:
            controldir.format_registry.remove('default')
            controldir.format_registry.remove('sample')
            controldir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.format_registry.get_default(),
                              old_format.__class__)


class SampleRepositoryFormat(bzrrepository.RepositoryFormatMetaDir):
    """A sample format

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.
    """

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_controldir, shared=False):
        """Initialize a repository in a BzrDir."""
        t = a_controldir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        return False

    def open(self, a_controldir, _found=False):
        return "opened repository."


class SampleExtraRepositoryFormat(repository.RepositoryFormat):
    """A sample format that can not be used in a metadir."""

    def get_format_string(self):
        raise NotImplementedError
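

# Note: "extra" formats such as SampleExtraRepositoryFormat carry no on-disk
# format string, so they are registered with register_extra() or
# register_extra_lazy() instead of being keyed by format string; the
# TestRepositoryFormatRegistry tests below exercise both paths.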


class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])

        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = transport.get_transport_from_path(url)
            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(dir)
            self.assertIsInstance(found_format, format.__class__)
        check_format(repository.format_registry.get_default(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          bzrrepository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_from_string(self):
        self.assertIsInstance(
            SampleRepositoryFormat.from_string(
                "Sample .bzr repository format."),
            SampleRepositoryFormat)
        self.assertRaises(AssertionError,
                          SampleRepositoryFormat.from_string,
                          "Different .bzr repository format.")

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          bzrrepository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_find_format_with_features(self):
        tree = self.make_branch_and_tree('.', format='2a')
        tree.branch.repository.update_feature_flags({"name": "necessity"})
        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
            tree.controldir)
        self.assertIsInstance(found_format, bzrrepository.RepositoryFormatMetaDir)
        self.assertEqual(found_format.features.get("name"), "necessity")
        self.assertRaises(bzrdir.MissingFeature,
                          found_format.check_support_status, True)
        self.addCleanup(bzrrepository.RepositoryFormatMetaDir.unregister_feature,
                        "name")
        bzrrepository.RepositoryFormatMetaDir.register_feature("name")
        found_format.check_support_status(True)


class TestRepositoryFormatRegistry(TestCase):

    def setUp(self):
        super(TestRepositoryFormatRegistry, self).setUp()
        self.registry = repository.RepositoryFormatRegistry()

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        self.registry.register(format)
        self.assertEqual(format, self.registry.get("Sample .bzr repository format."))
        self.registry.remove(format)
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")

    def test_get_all(self):
        format = SampleRepositoryFormat()
        self.assertEqual([], self.registry._get_all())
        self.registry.register(format)
        self.assertEqual([format], self.registry._get_all())

    def test_register_extra(self):
        format = SampleExtraRepositoryFormat()
        self.assertEqual([], self.registry._get_all())
        self.registry.register_extra(format)
        self.assertEqual([format], self.registry._get_all())

    def test_register_extra_lazy(self):
        self.assertEqual([], self.registry._get_all())
        self.registry.register_extra_lazy("breezy.tests.test_repository",
                                          "SampleExtraRepositoryFormat")
        formats = self.registry._get_all()
        self.assertEqual(1, len(formats))
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)


class TestFormatKnit1(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
                           '\nfoo fulltext 0 81 :')
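        # (A rough gloss, not normative: the path checked above is the file
        # id 'Nasty-IdC:' after double URL-escaping, stored under a hash
        # prefix directory ('e8/'), which is how per-file knits map ids to
        # file names.)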

    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """Check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision.

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory. So we grab the one from the expected text. Which
        is valid when the api is not being abused.
        """
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id."""
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
                   '</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
                          'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)


class DummyRepository(object):
    """A dummy repository for testing."""

    _format = None
    _serializer = None

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        return False

    def get_graph(self):
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as repositories
    so that none of the default registered inter-repository classes will
    match.
    """

    @staticmethod
    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
                isinstance(repo_target, DummyRepository))
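

# A minimal sketch of the optimiser probing that TestInterRepository below
# relies on, assuming an _optimisers registry (illustrative only, not the
# real breezy implementation):
#
#     for optimiser in repository.InterRepository._optimisers:
#         if optimiser.is_compatible(repo_a, repo_b):
#             return optimiser(repo_a, repo_b)
#     return default_inter_class(repo_a, repo_b)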


class TestInterRepository(TestCaseWithTransport):

    def test_get_default_inter_repository(self):
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for an inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        # is handed to them.
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_a._format.supports_full_versioned_files = True
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        dummy_b._format.supports_full_versioned_files = True
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(vf_repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that an optimised code path provider - an
        # InterRepository subclass can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        # check.
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_a._format.rich_root_data = repo._format.rich_root_data
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        dummy_b._serializer = repo._serializer
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_b._format.rich_root_data = repo._format.rich_root_data
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        repository.InterRepository.register_optimiser(InterDummy)
        try:
            # we should get the default for something InterDummy returns False
            # to
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
            self.assertGetsDefaultInterRepository(dummy_a, repo)
            # and we should get an InterDummy for a pair it 'likes'
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
            self.assertEqual(InterDummy, inter_repo.__class__)
            self.assertEqual(dummy_a, inter_repo.source)
            self.assertEqual(dummy_b, inter_repo.target)
        finally:
            repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)


class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return "Test Format 1"


class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return "Test Format 2"


class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
                        source_format)
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
                        target_format)
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        pb = breezy.ui.ui_factory.nested_progress_bar()
        try:
            converter.convert(repo, pb)
        finally:
            pb.finished()
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))


class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                              revision_tree.get_root_id())
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            revision_tree.get_file_lines(revision_tree.get_root_id())
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull',
            revision_tree.get_file_revision(revision_tree.get_root_id()))

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.controldir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
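        # (get_build_details() maps each key to a tuple whose first element
        # is the index memo -- assumed here to be roughly (index, start,
        # length) -- so equal [0][:3] prefixes mean both versions were packed
        # into the same compression group.)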

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)
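        # pack(hint=[...]) repacks only the named packs; the pack recorded in
        # to_keep is left untouched, which is why it must still be present in
        # the final collection.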

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                                                  format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult({'rev-2'}, {'rev-1'}, 1,
                                        {'rev-2'})
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)


class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
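

# _find_parent_ids_of_revisions(rev_set) returns the ids of parents that are
# referenced from rev_set but are not themselves members of it -- the "edge"
# of the set. As the tests above show, ghost parents are reported too, while
# NULL_REVISION never is.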


class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon. Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = next(empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True))
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))


class TestRepositoryPackCollection(TestCaseWithTransport):

    def get_format(self):
        return controldir.format_registry.make_controldir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve={'a-pack'})
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
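
    # A sketch of the digit-sum rule the assertions above pin down (an
    # illustration, not the production code in pack_repo):
    #
    #     def _max_pack_count(total_revisions):
    #         if not total_revisions:
    #             return 1
    #         return sum(int(digit) for digit in str(total_revisions))
    #
    # e.g. 112894 -> 1 + 1 + 2 + 8 + 9 + 4 == 25, matching the final check.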

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted({osutils.splitext(n)[0] for n in
                    packs._index_transport.list_dir('.')}))

    def test__obsolete_packs_missing_directory(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted({osutils.splitext(n)[0] for n in
                    packs._index_transport.list_dir('.')}))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
                         packs.pack_distribution(1))
        self.assertEqual([1, 1],
                         packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
                         packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
                         packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
                         packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
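
    # The distribution rule illustrated above: each decimal digit of the
    # revision count contributes that many buckets of its power of ten, so
    # 211 -> [100, 100, 10, 1], and the counts stay stable at the boundaries.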

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
                          (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would be
        # combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same pack.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = next(iter(orig_names))
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0][0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0][0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations

        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test_pack_no_obsolete_packs_directory(self):
        """Bug #314314, don't fail if obsolete_packs directory does
        not exist."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        packs._clear_obsolete_packs()


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
1434
"""Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort)  # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
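        # The distinct transports matter: a NewPack is staged on
        # upload_transport under random_name and only renamed into
        # pack_transport once finished, so the identity asserts above pin
        # that wiring down.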


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
                               [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
                               [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
                               [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                          packs, 'testing',
                                          revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files
        # should be moved to the front of the stack.
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
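        # Concretely: C lives in packs[1] and B in packs[2], so those two
        # move ahead of D (packs[0]) and A (packs[3]).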
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                                    [], '.test-pack')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort)  # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
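        # _optimize_for_size presumably tells the BTree builders to spend
        # extra effort compacting nodes so the finished indices are as small
        # as possible, which is the right trade-off for an explicit repack.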


class TestGCCHKPacker(TestCaseWithTransport):

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revision's Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.controldir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.controldir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
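        # Fetching 'B' into the stacked branch also brings in A's inventory
        # (a stacked repository appears to need parent inventories locally to
        # reconstruct deltas) but not A's revision text, which stays in the
        # fallback; that is exactly the disjoint layout this helper is after.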
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.controldir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # revision content
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error anymore.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                repo._pack_collection.all_packs(),
                                                '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
                              r"We are missing inventories for revisions: .*'A'")


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)
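
    # log_pack stands in for Repository.pack (via overrideAttr below): it
    # records every call so the tests can count how often packing happened
    # and, when a hint is expected, checks that one was actually passed
    # through.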

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)
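
    # The 'IDS' tests below drive Repository.fetch, which between differing
    # formats appears to use the InterDifferingSerializer code path, while
    # the 'sink' tests above push a record stream into the target's
    # insert_stream sink directly.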

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)


class Test_LazyListJoin(tests.TestCase):

    def test__repr__(self):
        lazy = repository._LazyListJoin(['a'], ['b'])
        self.assertEqual("breezy.repository._LazyListJoin((['a'], ['b']))",
                         repr(lazy))


class TestFeatures(tests.TestCaseWithTransport):

    def test_open_with_present_feature(self):
        self.addCleanup(
            bzrrepository.RepositoryFormatMetaDir.unregister_feature,
            "makes-cheese-sandwich")
        bzrrepository.RepositoryFormatMetaDir.register_feature(
            "makes-cheese-sandwich")
        repo = self.make_repository('.')
        repo._format.features["makes-cheese-sandwich"] = "required"
        repo._format.check_support_status(False)

    def test_open_with_missing_required_feature(self):
        repo = self.make_repository('.')
        repo._format.features["makes-cheese-sandwich"] = "required"
        self.assertRaises(bzrdir.MissingFeature,
                          repo._format.check_support_status, False)