/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/tests/per_versionedfile.py

  • Committer: Jelmer Vernooij
  • Date: 2019-06-03 23:48:08 UTC
  • mfrom: (7316 work)
  • mto: This revision was merged to the branch mainline in revision 7328.
  • Revision ID: jelmer@jelmer.uk-20190603234808-15yk5c7054tj8e2b
Merge trunk.

@@ -59,7 +59,6 @@
 from ..transport.memory import MemoryTransport
 from ..bzr import versionedfile as versionedfile
 from ..bzr.versionedfile import (
-    ChunkedContentFactory,
     ConstantMapper,
     HashEscapedPrefixMapper,
     PrefixMapper,
@@ -910,7 +909,6 @@
             return next(self.plan_merge_vf.get_record_stream(
                 [(b'root', suffix)], 'unordered', True))
         self.assertEqual(b'a', get_record(b'A').get_bytes_as('fulltext'))
-        self.assertEqual(b'a', b''.join(get_record(b'A').iter_bytes_as('chunked')))
         self.assertEqual(b'c', get_record(b'C').get_bytes_as('fulltext'))
         self.assertEqual(b'e', get_record(b'E:').get_bytes_as('fulltext'))
         self.assertEqual('absent', get_record('F').storage_kind)
@@ -1216,11 +1214,7 @@
         # Each is source_kind, requested_kind, adapter class
         scenarios = [
             ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
-            ('knit-delta-gz', 'lines', _mod_knit.DeltaPlainToFullText),
-            ('knit-delta-gz', 'chunked', _mod_knit.DeltaPlainToFullText),
             ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
-            ('knit-ft-gz', 'lines', _mod_knit.FTPlainToFullText),
-            ('knit-ft-gz', 'chunked', _mod_knit.FTPlainToFullText),
             ('knit-annotated-delta-gz', 'knit-delta-gz',
                 _mod_knit.DeltaAnnotatedToUnannotated),
             ('knit-annotated-delta-gz', 'fulltext',
@@ -1229,10 +1223,6 @@
                 _mod_knit.FTAnnotatedToUnannotated),
             ('knit-annotated-ft-gz', 'fulltext',
                 _mod_knit.FTAnnotatedToFullText),
-            ('knit-annotated-ft-gz', 'lines',
-                _mod_knit.FTAnnotatedToFullText),
-            ('knit-annotated-ft-gz', 'chunked',
-                _mod_knit.FTAnnotatedToFullText),
             ]
         for source, requested, klass in scenarios:
             adapter_factory = versionedfile.adapter_registry.get(
@@ -1245,16 +1235,16 @@
         transport = self.get_transport()
         return make_file_factory(annotated, mapper)(transport)

-    def helpGetBytes(self, f, ft_name, ft_adapter, delta_name, delta_adapter):
+    def helpGetBytes(self, f, ft_adapter, delta_adapter):
         """Grab the interested adapted texts for tests."""
         # origin is a fulltext
         entries = f.get_record_stream([(b'origin',)], 'unordered', False)
         base = next(entries)
-        ft_data = ft_adapter.get_bytes(base, ft_name)
+        ft_data = ft_adapter.get_bytes(base)
         # merged is both a delta and multiple parents.
         entries = f.get_record_stream([(b'merged',)], 'unordered', False)
         merged = next(entries)
-        delta_data = delta_adapter.get_bytes(merged, delta_name)
+        delta_data = delta_adapter.get_bytes(merged)
         return ft_data, delta_data

     def test_deannotation_noeol(self):
@@ -1262,9 +1252,10 @@
         # we need a full text, and a delta
         f = self.get_knit()
         get_diamond_files(f, 1, trailing_eol=False)
-        ft_data, delta_data = self.helpGetBytes(
-            f, 'knit-ft-gz', _mod_knit.FTAnnotatedToUnannotated(None),
-            'knit-delta-gz', _mod_knit.DeltaAnnotatedToUnannotated(None))
+        ft_data, delta_data = self.helpGetBytes(f,
+                                                _mod_knit.FTAnnotatedToUnannotated(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToUnannotated(None))
         self.assertEqual(
             b'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
             b'origin\n'
@@ -1280,9 +1271,10 @@
         # we need a full text, and a delta
         f = self.get_knit()
         get_diamond_files(f, 1)
-        ft_data, delta_data = self.helpGetBytes(
-            f, 'knit-ft-gz', _mod_knit.FTAnnotatedToUnannotated(None),
-            'knit-delta-gz', _mod_knit.DeltaAnnotatedToUnannotated(None))
+        ft_data, delta_data = self.helpGetBytes(f,
+                                                _mod_knit.FTAnnotatedToUnannotated(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToUnannotated(None))
         self.assertEqual(
             b'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
             b'origin\n'
@@ -1301,9 +1293,10 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(
-            f, 'fulltext', _mod_knit.FTAnnotatedToFullText(None),
-            'fulltext', _mod_knit.DeltaAnnotatedToFullText(logged_vf))
+        ft_data, delta_data = self.helpGetBytes(f,
+                                                _mod_knit.FTAnnotatedToFullText(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToFullText(logged_vf))
         self.assertEqual(b'origin', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
@@ -1317,9 +1310,10 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(
-            f, 'fulltext', _mod_knit.FTAnnotatedToFullText(None),
-            'fulltext', _mod_knit.DeltaAnnotatedToFullText(logged_vf))
+        ft_data, delta_data = self.helpGetBytes(f,
+                                                _mod_knit.FTAnnotatedToFullText(
+                                                    None),
+                                                _mod_knit.DeltaAnnotatedToFullText(logged_vf))
         self.assertEqual(b'origin\n', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
@@ -1336,9 +1330,10 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(
-            f, 'fulltext', _mod_knit.FTPlainToFullText(None),
-            'fulltext', _mod_knit.DeltaPlainToFullText(logged_vf))
+        ft_data, delta_data = self.helpGetBytes(f,
+                                                _mod_knit.FTPlainToFullText(
+                                                    None),
+                                                _mod_knit.DeltaPlainToFullText(logged_vf))
         self.assertEqual(b'origin\n', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged\n', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
@@ -1355,9 +1350,10 @@
         # Reconstructing a full text requires a backing versioned file, and it
         # must have the base lines requested from it.
         logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
-        ft_data, delta_data = self.helpGetBytes(
-            f, 'fulltext', _mod_knit.FTPlainToFullText(None),
-            'fulltext', _mod_knit.DeltaPlainToFullText(logged_vf))
+        ft_data, delta_data = self.helpGetBytes(f,
+                                                _mod_knit.FTPlainToFullText(
+                                                    None),
+                                                _mod_knit.DeltaPlainToFullText(logged_vf))
         self.assertEqual(b'origin', ft_data)
         self.assertEqual(b'base\nleft\nright\nmerged', delta_data)
         self.assertEqual([('get_record_stream', [(b'left',)], 'unordered',
@@ -1542,31 +1538,6 @@
         records.sort()
         self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)

-    def test_add_chunks(self):
-        f = self.get_versionedfiles()
-        key0 = self.get_simple_key(b'r0')
-        key1 = self.get_simple_key(b'r1')
-        key2 = self.get_simple_key(b'r2')
-        keyf = self.get_simple_key(b'foo')
-        def add_chunks(key, parents, chunks):
-            factory = ChunkedContentFactory(
-                key, parents, osutils.sha_strings(chunks), chunks)
-            return f.add_content(factory)
-
-        add_chunks(key0, [], [b'a', b'\nb\n'])
-        if self.graph:
-            add_chunks(key1, [key0], [b'b', b'\n', b'c\n'])
-        else:
-            add_chunks(key1, [], [b'b\n', b'c\n'])
-        keys = f.keys()
-        self.assertIn(key0, keys)
-        self.assertIn(key1, keys)
-        records = []
-        for record in f.get_record_stream([key0, key1], 'unordered', True):
-            records.append((record.key, record.get_bytes_as('fulltext')))
-        records.sort()
-        self.assertEqual([(key0, b'a\nb\n'), (key1, b'b\nc\n')], records)
-
     def test_annotate(self):
         files = self.get_versionedfiles()
         self.get_diamond_files(files)
@@ -1942,7 +1913,5 @@
             self.assertIsInstance(ft_bytes, bytes)
             chunked_bytes = factory.get_bytes_as('chunked')
             self.assertEqualDiff(ft_bytes, b''.join(chunked_bytes))
-            chunked_bytes = factory.iter_bytes_as('chunked')
-            self.assertEqualDiff(ft_bytes, b''.join(chunked_bytes))

         self.assertStreamOrder(sort_order, seen, keys)
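
For context on the API these tests exercise, here is a rough, untested sketch built from calls that appear in the hunks above (make_file_factory, ConstantMapper, MemoryTransport, get_record_stream, get_bytes_as) plus VersionedFiles.add_lines. The store name 'knit' and the top-level breezy import paths are illustrative assumptions and are not part of this change.

    # Untested sketch: build a knit-backed VersionedFiles on an in-memory
    # transport, store one text, and read it back as a fulltext.
    from breezy.bzr.knit import make_file_factory
    from breezy.bzr.versionedfile import ConstantMapper
    from breezy.transport.memory import MemoryTransport

    # First argument is 'annotated', as in make_file_factory(annotated, mapper) above.
    files = make_file_factory(True, ConstantMapper('knit'))(MemoryTransport())
    files.add_lines((b'origin',), [], [b'origin\n'])
    record = next(files.get_record_stream([(b'origin',)], 'unordered', True))
    assert record.get_bytes_as('fulltext') == b'origin\n'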