/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to bzrlib/versionedfile.py

Merge bzr.dev 4032. Resolve the new streaming fetch.

XXX: We cheat a bit for CHK fetching. CHK serializers happen to still
have legacy 'read_inventory_from_string' and 'write_inventory_to_string'
functions that convert the paged representation to a single-string XML
representation.

So when converting between formats, we just go down to the
whole-inventory XML form.

At least it works for now. Even if it is grossly inefficient.

Show diffs side-by-side

added added

removed removed

Lines of Context:
22
22
from copy import copy
23
23
from cStringIO import StringIO
24
24
import os
 
25
import struct
25
26
from zlib import adler32
26
27
 
27
28
from bzrlib.lazy_import import lazy_import
31
32
from bzrlib import (
32
33
    errors,
33
34
    index,
 
35
    knit,
34
36
    osutils,
35
37
    multiparent,
36
38
    tsort,
44
46
from bzrlib.registry import Registry
45
47
from bzrlib.symbol_versioning import *
46
48
from bzrlib.textmerge import TextMerge
 
49
from bzrlib.util import bencode
47
50
 
48
51
 
49
52
adapter_registry = Registry()
926
929
 
927
930
    has_key = index._has_key_from_parent_map
928
931
 
 
932
    def get_missing_compression_parent_keys(self):
 
933
        """Return an iterable of keys of missing compression parents.
 
934
 
 
935
        Check this after calling insert_record_stream to find out if there are
 
936
        any missing compression parents.  If there are, the records that
 
937
        depend on them are not able to be inserted safely. The precise
 
938
        behaviour depends on the concrete VersionedFiles class in use.
 
939
 
 
940
        Classes that do not support this will raise NotImplementedError.
 
941
        """
 
942
        raise NotImplementedError(self.get_missing_compression_parent_keys)
 
943
 
929
944
    def insert_record_stream(self, stream):
930
945
        """Insert a record stream into this container.
931
946
 
1473
1488
                pb.update("iterating texts", i, len(keys))
1474
1489
            for l in self._get_lines(key):
1475
1490
                yield (l, key)
 
1491
 
 
1492
 
 
1493
def network_bytes_to_kind_and_offset(network_bytes):
    """Strip off the record kind from the front of network_bytes.

    The wire format places the storage kind on its own line at the start of
    the record; everything after that newline is the kind-specific payload.

    :param network_bytes: The bytes of a record.
    :return: A tuple (storage_kind, offset_of_remaining_bytes)
    """
    newline_pos = network_bytes.find('\n')
    storage_kind = network_bytes[:newline_pos]
    # Payload begins just past the newline terminating the kind header.
    return storage_kind, newline_pos + 1
 
1502
 
 
1503
 
 
1504
class NetworkRecordStream(object):
    """A record_stream that rebuilds records from their serialised form."""

    def __init__(self, bytes_iterator):
        """Create a NetworkRecordStream.

        :param bytes_iterator: An iterator of bytes. Each item in this
            iterator should have been obtained from a record_streams'
            record.get_bytes_as(record.storage_kind) call.
        """
        self._bytes_iterator = bytes_iterator
        # Map each wire-format kind label to the parser that reconstitutes
        # record objects from its payload.  All four knit kinds share one
        # parser.
        knit_parser = knit.knit_network_to_record
        self._kind_factory = {
            'knit-ft-gz': knit_parser,
            'knit-delta-gz': knit_parser,
            'knit-annotated-ft-gz': knit_parser,
            'knit-annotated-delta-gz': knit_parser,
            'knit-delta-closure': knit.knit_delta_closure_to_records,
            'fulltext': fulltext_network_to_record,
            }

    def read(self):
        """Read the stream.

        :return: An iterator as per VersionedFiles.get_record_stream().
        """
        for wire_bytes in self._bytes_iterator:
            kind, offset = network_bytes_to_kind_and_offset(wire_bytes)
            parser = self._kind_factory[kind]
            # A single serialised item may expand to several records
            # (e.g. a knit delta closure), so iterate the parser's result.
            for record in parser(kind, wire_bytes, offset):
                yield record
 
1533
 
 
1534
 
 
1535
def fulltext_network_to_record(kind, bytes, line_end):
    """Convert a network fulltext record to a record object.

    Bug fixes versus the previous revision: the body referred to an
    undefined name ``record_bytes`` (the parameter is ``bytes``), raising
    NameError on every call; and the bare FulltextContentFactory is now
    wrapped in a list, because NetworkRecordStream.read iterates the value
    returned by each _kind_factory parser (as the knit parsers already do).

    :param kind: The storage kind stripped from the record (always
        'fulltext' here); unused, but kept for parser-signature
        compatibility with the other _kind_factory entries.
    :param bytes: The complete network bytes of the record, including the
        kind header line.
    :param line_end: Offset just past the newline ending the kind header.
    :return: A one-element list containing a FulltextContentFactory.
    """
    # A 4-byte big-endian length prefix delimits the bencoded metadata.
    meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
    record_meta = bytes[line_end+4:line_end+4+meta_len]
    key, parents = bencode.bdecode_as_tuple(record_meta)
    if parents == 'nil':
        # 'nil' is the serialised sentinel for parents being None.
        parents = None
    fulltext = bytes[line_end+4+meta_len:]
    return [FulltextContentFactory(key, parents, None, fulltext)]
 
1544
 
 
1545
 
 
1546
def _length_prefix(bytes):
 
1547
    return struct.pack('!L', len(bytes))
 
1548
 
 
1549
 
 
1550
def record_to_fulltext_bytes(self, record):
    """Serialise record in the 'fulltext' network wire format.

    NOTE(review): 'self' is unused and this is a module-level function, so
    the parameter looks spurious -- confirm no callers pass it positionally
    before removing it.

    :param record: A content factory exposing .key, .parents and
        get_bytes_as('fulltext').
    :return: A string consisting of the 'fulltext' kind header line, a
        4-byte length prefix, the bencoded (key, parents) metadata, then
        the fulltext content.
    """
    # bencode cannot represent None, so "no parents" is encoded as the
    # sentinel string 'nil' (decoded back in fulltext_network_to_record).
    if record.parents is None:
        parents = 'nil'
    else:
        parents = record.parents
    record_meta = bencode.bencode((record.key, parents))
    record_content = record.get_bytes_as('fulltext')
    return "fulltext\n%s%s%s" % (
        _length_prefix(record_meta), record_meta, record_content)
        _length_prefix(record_meta), record_meta, record_content)