# Copyright (C) 2006-2012, 2016 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for the Repository facility that are not interface tests.

For interface tests see tests/per_repository/*.py.

For concrete class tests see this file, and for storage formats tests
also see this file.
"""

from stat import S_ISDIR

import breezy.ui
from breezy.errors import (
    UnknownFormatError,
    )
from breezy.bzr import (
    btree_index,
    bzrdir,
    inventory,
    repository as bzrrepository,
    versionedfile,
    vf_repository,
    vf_search,
    )
from breezy.bzr.btree_index import BTreeBuilder, BTreeGraphIndex
from breezy.bzr.index import GraphIndex
from breezy.repository import RepositoryFormat
from breezy.tests import (
    TestCase,
    TestCaseWithTransport,
    )
from breezy import (
    controldir,
    errors,
    osutils,
    repository,
    revision as _mod_revision,
    tests,
    upgrade,
    workingtree,
    )
from breezy.bzr import (
    groupcompress_repo,
    knitpack_repo,
    knitrepo,
    pack_repo,
    )


class TestDefaultFormat(TestCase):

    def test_get_set_default_format(self):
        old_default = controldir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.format_registry.get_default()
        self.assertTrue(isinstance(old_format, private_default))

        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
            return my_bzrdir
        controldir.format_registry.remove('default')
        controldir.format_registry.register('sample', make_sample_bzrdir, '')
        controldir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        try:
            # the default branch format is used by the meta dir format
            # which is not the default bzrdir format at this point
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
            result = dir.create_repository()
            self.assertEqual(result, 'A bzr repository dir')
        finally:
            controldir.format_registry.remove('default')
            controldir.format_registry.remove('sample')
            controldir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.format_registry.get_default(),
                              old_format.__class__)


class SampleRepositoryFormat(bzrrepository.RepositoryFormatMetaDir):
    """A sample format

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.
    """

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return b"Sample .bzr repository format."

    def initialize(self, a_controldir, shared=False):
        """Initialize a repository in a BzrDir."""
        t = a_controldir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        return False

    def open(self, a_controldir, _found=False):
        return "opened repository."


class SampleExtraRepositoryFormat(repository.RepositoryFormat):
    """A sample format that cannot be used in a metadir."""

    def get_format_string(self):
        raise NotImplementedError


class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        self.build_tree(["foo/", "bar/"])

        def check_format(format, url):
            dir = format._matchingcontroldir.initialize(url)
            format.initialize(dir)
            found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
                dir)
            self.assertIsInstance(found_format, format.__class__)
        check_format(repository.format_registry.get_default(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          bzrrepository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_from_string(self):
        self.assertIsInstance(
            SampleRepositoryFormat.from_string(
                b"Sample .bzr repository format."),
            SampleRepositoryFormat)
        self.assertRaises(AssertionError,
                          SampleRepositoryFormat.from_string,
                          b"Different .bzr repository format.")

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          bzrrepository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_find_format_with_features(self):
        tree = self.make_branch_and_tree('.', format='2a')
        tree.branch.repository.update_feature_flags({b"name": b"necessity"})
        found_format = bzrrepository.RepositoryFormatMetaDir.find_format(
            tree.controldir)
        self.assertIsInstance(
            found_format, bzrrepository.RepositoryFormatMetaDir)
        self.assertEqual(found_format.features.get(b"name"), b"necessity")
        self.assertRaises(
            bzrdir.MissingFeature, found_format.check_support_status, True)
        self.addCleanup(
            bzrrepository.RepositoryFormatMetaDir.unregister_feature, b"name")
        bzrrepository.RepositoryFormatMetaDir.register_feature(b"name")
        found_format.check_support_status(True)


class TestRepositoryFormatRegistry(TestCase):

    def setUp(self):
        super(TestRepositoryFormatRegistry, self).setUp()
        self.registry = repository.RepositoryFormatRegistry()

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        self.registry.register(format)
        self.assertEqual(format, self.registry.get(
            b"Sample .bzr repository format."))
        self.registry.remove(format)
        self.assertRaises(KeyError, self.registry.get,
                          b"Sample .bzr repository format.")

    def test_get_all(self):
        format = SampleRepositoryFormat()
        self.assertEqual([], self.registry._get_all())
        self.registry.register(format)
        self.assertEqual([format], self.registry._get_all())

    def test_register_extra(self):
        format = SampleExtraRepositoryFormat()
        self.assertEqual([], self.registry._get_all())
        self.registry.register_extra(format)
        self.assertEqual([format], self.registry._get_all())

    def test_register_extra_lazy(self):
        self.assertEqual([], self.registry._get_all())
        self.registry.register_extra_lazy("breezy.tests.test_repository",
                                          "SampleExtraRepositoryFormat")
        formats = self.registry._get_all()
        self.assertEqual(1, len(formats))
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)


class TestFormatKnit1(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        with t.get('format') as f:
            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
                                 f.read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], [b'Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('foo', b'')
        tree.commit('1st post', rev_id=b'foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
                           b'\nfoo fulltext 0 81 :')
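        # Note on the escaped path above (an observation, not original text):
        # per-file knit names appear to be the URL-quoted file id quoted a
        # second time ('%25' is an escaped '%', so 'N' -> '%4e' -> '%254e'),
        # stored under a two-character prefix directory such as 'e8'.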

    def assertHasKnit(self, t, knit_name, extra_content=b''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff(b'# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        with t.get('format') as f:
            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
                                 f.read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff(b'', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(
            control, shared=True)
        repo.set_make_working_trees(False)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        with t.get('format') as f:
            self.assertEqualDiff(b'Bazaar-NG Knit Repository Format 1',
                                 f.read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff(b'', t.get('shared-storage').read())
        self.assertEqualDiff(b'', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory, so we grab the one from the expected text, which
        is valid as long as the API is not being abused.
        """
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        inv_xml = b'<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory(b'test-rev-id', [inv_xml])
        self.assertEqual(b'test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        inv_xml = (b'<inventory format="5" revision_id="other-rev-id">\n'
                   b'</inventory>\n')
        # Arguably, _deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
                          b'test-rev-id', [inv_xml])
        inv = repo._deserialise_inventory(b'other-rev-id', [inv_xml])
        self.assertEqual(b'other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository(
            '.', format=controldir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)


class DummyRepository(object):
    """A dummy repository for testing."""

    _format = None
    _serializer = None

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        return False

    def get_graph(self):
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as repositories
    so that none of the default registered inter-repository classes will
    match.
    """

    @staticmethod
    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
                isinstance(repo_target, DummyRepository))


class TestInterRepository(TestCaseWithTransport):

    def test_get_default_inter_repository(self):
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for an inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        # is handed to them.
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_a._format.supports_full_versioned_files = True
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        dummy_b._format.supports_full_versioned_files = True
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(vf_repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that an optimised code path provider - an
        # InterRepository subclass can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        # check.
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_a._format.supports_tree_reference = (
            repo._format.supports_tree_reference)
        dummy_a._format.rich_root_data = repo._format.rich_root_data
        dummy_a._format.supports_full_versioned_files = (
            repo._format.supports_full_versioned_files)
        dummy_b._serializer = repo._serializer
        dummy_b._format.supports_tree_reference = (
            repo._format.supports_tree_reference)
        dummy_b._format.rich_root_data = repo._format.rich_root_data
        dummy_b._format.supports_full_versioned_files = (
            repo._format.supports_full_versioned_files)
        repository.InterRepository.register_optimiser(InterDummy)
        try:
            # we should get the default for something InterDummy returns False
            # to.
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
            self.assertGetsDefaultInterRepository(dummy_a, repo)
            # and we should get an InterDummy for a pair it 'likes'
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
            self.assertEqual(InterDummy, inter_repo.__class__)
            self.assertEqual(dummy_a, inter_repo.source)
            self.assertEqual(dummy_b, inter_repo.target)
        finally:
            repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)


class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return b"Test Format 1"


class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return b"Test Format 2"


class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
                        source_format)
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
                        target_format)
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        with breezy.ui.ui_factory.nested_progress_bar() as pb:
            converter.convert(repo, pb)
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))


class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id=b"dull")
        revision_tree = tree.branch.repository.revision_tree(b'dull')
        with revision_tree.lock_read():
            self.assertRaises(
                errors.NoSuchFile, revision_tree.get_file_lines, u'')
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree(b'dull')
        with revision_tree.lock_read():
            revision_tree.get_file_lines(u'')
        tree.commit("Another dull commit", rev_id=b'dull2')
        revision_tree = tree.branch.repository.revision_tree(b'dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual(b'dull', revision_tree.get_file_revision(u''))

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], [b'root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.controldir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', '')),
            ('add', ('file', b'file-id', 'file', b'content\n'))],
            revision_id=b'1')
        builder.build_snapshot([b'1'], [
            ('modify', ('file', b'content-2\n'))],
            revision_id=b'2')
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [(b'file-id', b'1',), (b'file-id', b'2',)])
        file_1_details = details[(b'file-id', b'1')]
        file_2_details = details[(b'file-id', b'2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], [b'TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(
            65536, inv.parent_id_basename_to_file_id._root_node.maximum_size)
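        # 65536 bytes (64 KiB) appears to be the page-size cap shared by both
        # CHK maps (id_to_entry and parent_id_basename_to_file_id) in 2a.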

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], [b'TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], [b'TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                                                  format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', b'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname.encode('utf-8') + b'-id'
                content = b'content for %s\n' % (fname.encode('utf-8'),)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot(None, entries, revision_id=b'rev-1')
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot([b'rev-1'], [
            ('modify', ('aa', b'new content for aa-id\n')),
            ('modify', ('cc', b'new content for cc-id\n')),
            ('modify', ('zz', b'new content for zz-id\n')),
            ], revision_id=b'rev-2')
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult({b'rev-2'}, {b'rev-1'}, 1,
                                        {b'rev-2'})
        simple_chk_records = set()
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.add(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual({(b'sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          (b'sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          (b'sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          (b'sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)},
                         set(simple_chk_records))
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', b'rev-2')]
        full_chk_records = set()
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual((b'rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.add(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
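        # Arithmetic behind the 257 (illustrative, not original text): the
        # tree holds 35 * 35 = 1225 files, and a complete listing of the
        # id_to_entry map streams its 1 root node plus all 256 leaf pages.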

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(
            stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)


class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot(
            None,
            [('add', ('', b'tree-root', 'directory', None))],
            revision_id=b'initial')
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(
            sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
        rev_set = [b'revid2']
        self.assertParentIds([b'revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2')
        self.builder.build_snapshot([b'revid2'], [], revision_id=b'revid3')
        rev_set = [b'revid3', b'revid2']
        self.assertParentIds([b'revid1'], rev_set)

    def test_not_null(self):
        rev_set = [b'initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        rev_set = [b'ghost', b'revid1']
        self.assertParentIds([b'initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot(
            [b'revid1', b'ghost'], [], revision_id=b'revid2')
        rev_set = [b'revid2', b'revid1']
        self.assertParentIds([b'ghost', b'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot(None, [], revision_id=b'revid1')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2a')
        self.builder.build_snapshot([b'revid1'], [], revision_id=b'revid2b')
        self.builder.build_snapshot([b'revid2a', b'revid2b'], [],
                                    revision_id=b'revid3')
        rev_set = [b'revid3', b'revid2a']
        self.assertParentIds([b'revid1', b'revid2b'], rev_set)


class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id=b'rev1a')
            inv.root.revision = b'rev1a'
            self.add_file(repo, inv, 'file1', b'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, b'rev1a'), [], [])
            repo.add_inventory(b'rev1a', inv, [])
            revision = _mod_revision.Revision(
                b'rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision(b'rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id=b'rev1b')
            inv.root.revision = b'rev1b'
            self.add_file(repo, inv, 'file1', b'rev1b', [])
            repo.add_inventory(b'rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', b'rev2', [b'rev1a', b'rev1b'])
            self.add_file(repo, inv, 'file2', b'rev2', [])
            self.add_revision(repo, b'rev2', inv, [b'rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', b'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', b'rev3', [b'rev1c'])
            self.add_revision(repo, b'rev3', inv, [b'rev1c'])
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(
            revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename.encode('utf-8') + b'-id'
        content = [b'line\n']
        entry = inventory.InventoryFile(file_id, filename, b'TREE_ROOT')
        entry.revision = revision
        entry.text_sha1 = osutils.sha_strings(content)
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, content)

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = next(empty_repo.texts.get_record_stream(
            [(b'file2-id', b'rev3')], 'topological', True))
        self.assertEqual(b'line\n', text.get_bytes_as('fulltext'))


class TestRepositoryPackCollection(TestCaseWithTransport):

    def get_format(self):
        return controldir.format_registry.make_controldir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
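        # Observation on the two assertions above (not original text): every
        # file under obsolete_packs is deleted, but only stems backed by a
        # '.pack' file ('a-pack', 'another-pack') are reported in the result.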

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', b'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', b'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', b'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', b'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', b'foo\n')
        res = packs._clear_obsolete_packs(preserve={'a-pack'})
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the count is the sum of the digits - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
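        # Worked instance of the digit-sum rule (illustrative, not part of
        # the original test): 112894 -> 1 + 1 + 2 + 8 + 9 + 4 = 25.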

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
                              'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
                         sorted({osutils.splitext(n)[0] for n in
                                 packs._index_transport.list_dir('.')}))

    def test__obsolete_packs_missing_directory(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
                         sorted({osutils.splitext(n)[0] for n in
                                 packs._index_transport.list_dir('.')}))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
                         packs.pack_distribution(1))
        self.assertEqual([1, 1],
                         packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
                         packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
                         packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
                         packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
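        # Reading the assertions above, pack_distribution(n) appears to split
        # n revisions into decimal-digit buckets, e.g. 211 -> [100, 100, 10, 1].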

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
                          (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would be
        # combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same pack.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
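        # Arithmetic check (illustrative, not part of the original test):
        # 50 + 40 + 30 + 6 + 4 = 130 revisions land in the single new pack,
        # while the untouched size-10 packs 'd' and 'e' are left alone.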

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(
            pack_repo.ExistingPack(
                packs._pack_transport, name, rev_index, inv_index, txt_index,
                sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = next(iter(orig_names))
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            (b'text', b'rev'), (), None, b'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = {x[0] for x in new_nodes}
        self.assertEqual(names, sorted([x[0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual({new_pack.name}, new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            (b'bogus-rev',), (), None, b'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations

        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = {osutils.splitext(n)[0] for n in obsolete_packs}
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test_pack_no_obsolete_packs_directory(self):
        """Bug #314314, don't fail if obsolete_packs directory does
        not exist."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        packs._clear_obsolete_packs()


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort)  # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', None)),
            ('add', ('f', b'f-id', 'file', b'content\n'))],
            revision_id=b'A')
        builder.build_snapshot([b'A'],
                               [('modify', ('f', b'new-content\n'))],
                               revision_id=b'B')
        builder.build_snapshot([b'B'],
                               [('modify', ('f', b'third-content\n'))],
                               revision_id=b'C')
        builder.build_snapshot([b'C'],
                               [('modify', ('f', b'fourth-content\n'))],
                               revision_id=b'D')
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                          packs, 'testing',
                                          revision_ids=[b'B', b'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                                    [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort)  # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestGCCHKPacker(TestCaseWithTransport):
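    """Tests for the groupcompress/CHK packer used by 2a repositories."""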

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot(None, [
            ('add', ('', b'root-id', 'directory', None)),
            ('add', ('file', b'file-id', 'file', b'content\n')),
            ], revision_id=b'A')
        builder.build_snapshot([b'A'], [
            ('add', ('dir', b'dir-id', 'directory', None))],
            revision_id=b'B')
        builder.build_snapshot([b'B'], [
            ('modify', ('file', b'new content\n'))],
            revision_id=b'C')
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """A repo with separate packs for a revision's Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.controldir.sprout(
            'base', revision_id=b'A').open_branch()
        b_stacked = b_base.controldir.sprout(
            'stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, b'B')
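        # Fetching B into the stacked repository copies B's revision and also
        # A's inventory (so inventory deltas against the parent can be
        # computed), but not A's Revision. That mismatch is what produces the
        # disjoint pack layout this helper promises.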
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.controldir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([(b'A',), (b'B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([(b'B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if (b'A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, b'A')
        self.assertEqual([(b'A',), (b'B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, b'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error anymore.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
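        # With the inventory pack dropped from the collection, repacking
        # everything should notice that A's inventory is unreachable and
        # refuse, rather than silently writing a broken pack.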
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                repo._pack_collection.all_packs(),
                                                '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
                              r"We are missing inventories for revisions: .*'A'")


class TestCrossFormatPacks(TestCaseWithTransport):
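    """Tests that cross-format fetches pack the target only when its
    format reports that packing would make a difference."""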

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)
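
    # The test_IDS_* variants below exercise a plain fetch, which for
    # differing formats goes through the InterDifferingSerializer path
    # rather than the stream sink used above.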

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)


class Test_LazyListJoin(tests.TestCase):

    def test__repr__(self):
        lazy = repository._LazyListJoin(['a'], ['b'])
        self.assertEqual("breezy.repository._LazyListJoin((['a'], ['b']))",
                         repr(lazy))


class TestFeatures(tests.TestCaseWithTransport):

    def test_open_with_present_feature(self):
        self.addCleanup(
            bzrrepository.RepositoryFormatMetaDir.unregister_feature,
            b"makes-cheese-sandwich")
        bzrrepository.RepositoryFormatMetaDir.register_feature(
            b"makes-cheese-sandwich")
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features[b"makes-cheese-sandwich"] = b"required"
        repo._format.check_support_status(False)
        repo.unlock()

    def test_open_with_missing_required_feature(self):
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features[b"makes-cheese-sandwich"] = b"required"
        self.assertRaises(bzrdir.MissingFeature,
                          repo._format.check_support_status, False)