 # the ordering here is to make a tree so that dumb searches have
 # more changes to muck up.
-class InstrumentedProgress(progress.DummyProgress):
+class InstrumentedProgress(progress.ProgressTask):

     def __init__(self):
-        progress.DummyProgress.__init__(self)
+        progress.ProgressTask.__init__(self)
         self.updates = []

     def update(self, msg=None, current=None, total=None):
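The body of update() falls outside this hunk. A minimal sketch of how the instrumented class presumably reads after the change, assuming update() does nothing but record its arguments (that recording body is an assumption, not part of the diff):

from bzrlib import progress

class InstrumentedProgress(progress.ProgressTask):
    """Records every update() call so tests can assert on progress output."""

    def __init__(self):
        progress.ProgressTask.__init__(self)
        self.updates = []

    def update(self, msg=None, current=None, total=None):
        # Assumed body: capture the arguments instead of rendering progress.
        self.updates.append((msg, current, total))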
         # we should be able to read from http with a versioned file.
         vf = self.get_file()
         # try an empty file access
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo', get_transport(
+            self.get_readonly_url('.')))
         self.assertEqual([], readonly_vf.versions())
+
+    def test_readonly_http_works_with_feeling(self):
+        # we should be able to read from http with a versioned file.
+        vf = self.get_file()
         # now with feeling.
         vf.add_lines('1', [], ['a\n'])
         vf.add_lines('2', ['1'], ['b\n', 'a\n'])
-        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
+        readonly_vf = self.get_factory()('foo', get_transport(
+            self.get_readonly_url('.')))
         self.assertEqual(['1', '2'], vf.versions())
+        self.assertEqual(['1', '2'], readonly_vf.versions())
         for version in readonly_vf.versions():
             readonly_vf.get_lines(version)
         transport.mkdir('.')
         files = self.factory(transport)
         if self.cleanup is not None:
-            self.addCleanup(lambda:self.cleanup(files))
+            self.addCleanup(self.cleanup, files)
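The cleanup change swaps a lambda for addCleanup's own argument-passing form: the callable and its arguments are registered immediately and invoked at teardown. A self-contained illustration of that style using the standard unittest API (illustrative only, not bzrlib's test infrastructure):

import unittest

class CleanupStyleExample(unittest.TestCase):
    """Illustrative only: the addCleanup(callable, *args) registration style."""

    def test_cleanup_called_with_args(self):
        calls = []
        # The callable and its argument are stored here and run at teardown,
        # so no lambda is needed to close over 'calls'.
        self.addCleanup(calls.append, 'files')
        self.assertEqual([], calls)  # nothing runs until the test finishes

if __name__ == '__main__':
    unittest.main()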

     def get_simple_key(self, suffix):
         # All texts should be output.
         self.assertEqual(set(keys), seen)

+    def test_clear_cache(self):
+        files = self.get_versionedfiles()
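Only the first line of the new test_clear_cache body appears in the hunk. A guess at its shape, assuming the VersionedFiles under test exposes a clear_cache() method (which the test name implies):

    def test_clear_cache(self):
        files = self.get_versionedfiles()
        # Assumed completion: clearing caches on a fresh store should be a
        # harmless no-op and must not raise.
        files.clear_cache()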

     def test_construct(self):
         """Each parameterised test can be constructed on a transport."""
         files = self.get_versionedfiles()
         self.assertIdenticalVersionedFile(source, files)

+    def test_insert_record_stream_long_parent_chain_out_of_order(self):
+        """An out of order stream can either error or work."""
+            raise TestNotApplicable('ancestry info only relevant with graph.')
+        # Create a reasonably long chain of records based on each other, where
+        # most will be deltas.
+        source = self.get_versionedfiles('source')
+        content = [('same same %d\n' % n) for n in range(500)]
+        for letter in 'abcdefghijklmnopqrstuvwxyz':
+            key = ('key-' + letter,)
+            if self.key_length == 2:
+                key = ('prefix',) + key
+            content.append('content for ' + letter + '\n')
+            source.add_lines(key, parents, content)
+        # Create a stream of these records, excluding the first record that the
+        # rest ultimately depend upon, and insert it into a new vf.
+        for key in reversed(keys):
+            streams.append(source.get_record_stream([key], 'unordered', False))
+        deltas = chain(*streams[:-1])
+        files = self.get_versionedfiles()
+            files.insert_record_stream(deltas)
+        except RevisionNotPresent:
+            # Must not have corrupted the file.
+        # Must only report either just the first key as a missing parent, or
+        # no key as missing (for nodelta scenarios).
+        missing = set(files.get_missing_compression_parent_keys())
+        missing.discard(keys[0])
+        self.assertEqual(set(), missing)
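The new long-parent-chain test arrives with a few scaffolding lines missing from the hunk (presumably a graph-support guard before the raise, the parents/keys/streams initialisation, and a try: around insert_record_stream). The core trick is chain(*streams[:-1]): per-key streams are collected newest-first, and dropping the last one withholds the base record that every other record is a delta against. A stand-alone sketch of that step, with one-element lists standing in for the records returned by get_record_stream():

from itertools import chain

# Hypothetical keys: 'key-a' is the base text and each later key is a delta
# against the previous one, mirroring the chain built in the test above.
keys = [('key-a',), ('key-b',), ('key-c',), ('key-d',)]
streams = [[key] for key in reversed(keys)]
# streams[-1] holds the base record, so chaining everything but that last
# stream yields only the deltas, newest-first: exactly the out-of-order
# input handed to insert_record_stream().
deltas = list(chain(*streams[:-1]))
assert deltas == [('key-d',), ('key-c',), ('key-b',)]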

     def get_knit_delta_source(self):
         """Get a source that can produce a stream with knit delta records,
         regardless of this test's scenario.
 # the ordering here is to make a tree so that dumb searches have
 # more changes to muck up.
-class InstrumentedProgress(progress.DummyProgress):
+class InstrumentedProgress(progress.ProgressTask):

     def __init__(self):
-        progress.DummyProgress.__init__(self)
+        progress.ProgressTask.__init__(self)
         self.updates = []

     def update(self, msg=None, current=None, total=None):