from __future__ import absolute_import

import operator

from ..lazy_import import lazy_import
lazy_import(globals(), """
import gzip

from breezy import (
    graph as _mod_graph,
    tuned_gzip,
    )
from breezy.bzr import (
    index as _mod_index,
    )

from breezy.bzr import pack_repo
from breezy.i18n import gettext
""")

from .. import (
    errors,
    osutils,
    )
from ..errors import (
    InternalBzrError,
    NoSuchFile,
    RevisionNotPresent,
    )
from ..osutils import (
    contains_whitespace,
    )
from ..sixish import (
    BytesIO,
    range,
    viewitems,
    viewvalues,
    )
from ..bzr.versionedfile import (
    AbsentContentFactory,
    adapter_registry,
    ConstantMapper,
    VersionedFilesWithFallbacks,
    )

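
# Editor's note (added): judging by its name and its use in the
# record-stream code, this is the minimum number of bytes to buffer per
# batch when streaming records (5 MiB).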
_STREAM_MIN_BUFFER_SIZE = 5*1024*1024


class KnitError(InternalBzrError):

    _fmt = "Knit error"


class KnitCorrupt(KnitError):

    _fmt = "Knit %(filename)s corrupt: %(how)s"

    def __init__(self, filename, how):
        KnitError.__init__(self)
        self.filename = filename
        self.how = how


class SHA1KnitCorrupt(KnitCorrupt):

    _fmt = ("Knit %(filename)s corrupt: sha-1 of reconstructed text does not "
        "match expected sha-1. key %(key)s expected sha %(expected)s actual "
        "sha %(actual)s")

    def __init__(self, filename, actual, expected, key, content):
        KnitError.__init__(self)
        self.filename = filename
        self.actual = actual
        self.expected = expected
        self.key = key
        self.content = content


class KnitDataStreamIncompatible(KnitError):
    # Not raised anymore, as we can convert data streams. In future we may
    # need it again for more exotic cases, so we're keeping it around for now.

    _fmt = ("Cannot insert knit data stream of format \"%(stream_format)s\" "
            "into knit of format \"%(target_format)s\".")

    def __init__(self, stream_format, target_format):
        self.stream_format = stream_format
        self.target_format = target_format


class KnitDataStreamUnknown(KnitError):
    # Indicates a data stream we don't know how to handle.

    _fmt = "Cannot parse knit data stream of format \"%(stream_format)s\"."

    def __init__(self, stream_format):
        self.stream_format = stream_format


class KnitHeaderError(KnitError):

    _fmt = 'Knit header error: %(badline)r unexpected for file "%(filename)s".'

    def __init__(self, badline, filename):
        KnitError.__init__(self)
        self.badline = badline
        self.filename = filename


class KnitIndexUnknownMethod(KnitError):
    """Raised when we don't understand the storage method.

    Currently only 'fulltext' and 'line-delta' are supported.
    """

    _fmt = ("Knit index %(filename)s does not have a known method"
            " in options: %(options)r")

    def __init__(self, filename, options):
        KnitError.__init__(self)
        self.filename = filename
        self.options = options
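

# Editor's sketch (not part of the original module): like other BzrError
# subclasses, these exceptions render their message by interpolating their
# instance attributes into the class _fmt string, so, assuming the standard
# BzrError formatting behaviour:
#
#   >>> str(KnitCorrupt('index.knit', 'truncated header'))
#   'Knit index.knit corrupt: truncated header'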


class KnitAdapter(object):
    """Base class for knit record adaption."""

        delta = self._annotate_factory.parse_line_delta(contents, rec[1],
            plain=True)
        compression_parent = factory.parents[0]
        basis_entry = next(self._basis_vf.get_record_stream(
            [compression_parent], 'unordered', True))
        if basis_entry.storage_kind == 'absent':
            raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
        basis_chunks = basis_entry.get_bytes_as('chunked')

        delta = self._plain_factory.parse_line_delta(contents, rec[1])
        compression_parent = factory.parents[0]
        # XXX: string splitting overhead.
        basis_entry = next(self._basis_vf.get_record_stream(
            [compression_parent], 'unordered', True))
        if basis_entry.storage_kind == 'absent':
            raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
        basis_chunks = basis_entry.get_bytes_as('chunked')

    def __init__(self, lines):
        KnitContent.__init__(self)
        self._lines = list(lines)

    def annotate(self):
        """Return a list of (origin, text) for each content line."""

        # but the code itself doesn't really depend on that.
        # Figure out a way to not require the overhead of turning the
        # list back into tuples.
        lines = (tuple(line.split(' ', 1)) for line in content)
        return AnnotatedKnitContent(lines)

    def parse_line_delta_iter(self, lines):

        if plain:
            for header in lines:
                start, end, count = [int(n) for n in header.split(',')]
                contents = [next(lines).split(' ', 1)[1] for _ in range(count)]
                result.append((start, end, count, contents))
        else:
            for header in lines:
                start, end, count = [int(n) for n in header.split(',')]
                contents = [tuple(next(lines).split(' ', 1))
                            for _ in range(count)]
                result.append((start, end, count, contents))
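
        # Editor's note (inferred from the parsing above, not original
        # text): a serialised line-delta is a sequence of hunks, each a
        # 'start,end,count' header line followed by exactly `count`
        # annotated lines of the form '<origin> <text>', e.g.:
        #
        #   0,1,2
        #   rev-a replacement line one
        #   rev-a replacement line two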

        Only the actual content lines.
        """
        lines = iter(lines)
        for header in lines:
            header = header.split(',')
            count = int(header[2])
            for _ in range(count):
                origin, text = next(lines).split(' ', 1)
                yield text

    def lower_fulltext(self, content):

            if compression_parent not in all_build_index_memos:
                next_keys.add(compression_parent)
        build_keys = next_keys
    return sum(index_memo[2]
               for index_memo in viewvalues(all_build_index_memos))


class KnitVersionedFiles(VersionedFilesWithFallbacks):

    def __init__(self, index, data_access, max_delta_chain=200,
                 annotated=False, reload_func=None):
        """Create a KnitVersionedFiles with index and data_access.

        :param annotated: Set to True to cause annotations to be calculated
            and stored during insertion.
        :param reload_func: A function that can be called if we think we need
            to reload the pack listing and try again. See
            'breezy.bzr.pack_repo.AggregateIndex' for the signature.
        """
        self._index = index
        self._access = data_access
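        # Editor's note (added): the two collaborators split the work --
        # self._index records each key's options, parents and location memo,
        # while self._access reads and writes the raw record bytes that
        # those memos point at.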

            # indexes can't directly store that, so we give them
            # an empty tuple instead.
            parents = ()
        line_bytes = b''.join(lines)
        return self._add(key, lines, parents,
            parent_texts, left_matching_blocks, nostore_sha, random_id,
            line_bytes=line_bytes)

    def _add_text(self, key, parents, text, nostore_sha=None, random_id=False):
        """See VersionedFiles._add_text()."""
        self._index._check_write_ok()
        self._check_add(key, None, random_id, check_content=False)
        if text.__class__ is not str:
            raise errors.BzrBadParameterUnicode("text")
        if parents is None:
            # The caller might pass None if there is no graph data, but kndx
            # indexes can't directly store that, so we give them
            # an empty tuple instead.
            parents = ()
        return self._add(key, None, parents,
            None, None, nostore_sha, random_id,
            line_bytes=text)

    def _add(self, key, lines, parents, parent_texts,
        left_matching_blocks, nostore_sha, random_id,
        line_bytes):

        # Note: line_bytes is not modified to add a newline, that is tracked
        # via the no_eol flag. 'lines' *is* modified, because that is the
        # general values needed by the Content code.
        if line_bytes and not line_bytes.endswith(b'\n'):
            options.append('no-eol')
            no_eol = True
            # Copy the existing list, or create a new one
            if lines is None:
                lines = osutils.split_lines(line_bytes)
            else:
                lines = lines[:]
            # Replace the last line with one that ends in a final newline
            lines[-1] = lines[-1] + b'\n'
        if lines is None:
            lines = osutils.split_lines(line_bytes)

        for element in key[:-1]:
            if not isinstance(element, bytes):
                raise TypeError("key contains non-strings: %r" % (key,))
        if key[-1] is None:
            key = key[:-1] + ('sha1:' + digest,)
        elif not isinstance(key[-1], bytes):
            raise TypeError("key contains non-strings: %r" % (key,))
        # Knit hunks are still last-element only
        version_id = key[-1]

            options.append('line-delta')
            store_lines = self._factory.lower_line_delta(delta_hunks)
            size, data = self._record_to_data(key, digest,
                store_lines)
        else:
            options.append('fulltext')
            # isinstance is slower and we have no hierarchy.
            if self._factory.__class__ is KnitPlainFactory:
                # Use the already joined bytes saving iteration time in
                # _record_to_data.
                dense_lines = [line_bytes]
                if no_eol:
                    dense_lines.append(b'\n')
                size, data = self._record_to_data(key, digest,
                    lines, dense_lines)
            else:
                # get mixed annotation + content and feed it into the
                # serialiser.
                store_lines = self._factory.lower_fulltext(content)
                size, data = self._record_to_data(key, digest,
                    store_lines)

        access_memo = self._access.add_raw_records([(key, size)], data)[0]
        self._index.add_records(
            ((key, options, access_memo, parents),),
            random_id=random_id)
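        # Editor's summary (added): _add serialises the new text either as a
        # line-delta against its compression parent or as a fulltext, gzips
        # it via _record_to_data, appends the raw bytes through
        # self._access.add_raw_records, and finally registers the key,
        # options and access memo in self._index.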

            if self._index.get_method(key) != 'fulltext':
                compression_parent = parent_map[key][0]
                if compression_parent not in parent_map:
                    raise KnitCorrupt(self,
                        "Missing basis parent %s for %s" % (
                            compression_parent, key))
        for fallback_vfs in self._immediate_fallback_vfs:

            try:
                build_details = self._index.get_build_details([parent])
                parent_details = build_details[parent]
            except (RevisionNotPresent, KeyError) as e:
                # Some basis is not locally present: always fulltext
                return False
            index_memo, compression_parent, _, _ = parent_details

            build_details = self._index.get_build_details(pending_components)
            current_components = set(pending_components)
            pending_components = set()
            for key, details in viewitems(build_details):
                (index_memo, compression_parent, parents,
                 record_details) = details
                method = record_details[0]

                # key = component_id, r = record_details, i_m = index_memo,
                # n = next
                records = [(key, i_m) for key, (r, i_m, n)
                           in viewitems(position_map)]
                # Sort by the index memo, so that we request records from the
                # same pack file together, and in forward-sorted order
                records.sort(key=operator.itemgetter(1))
                raw_record_map = {}
                for key, data in self._read_records_iter_unchecked(records):
                    (record_details, index_memo, next) = position_map[key]
                    raw_record_map[key] = data, record_details, next
                return raw_record_map
            except errors.RetryWithNewPacks as e:
                self._access.reload_or_raise(e)
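                # Editor's note (added): reload_or_raise either re-raises the
                # RetryWithNewPacks error (when no reload_func was supplied
                # or reloading found nothing new) or returns, letting the
                # enclosing retry loop re-run the read against the refreshed
                # pack listing.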

                    remaining_keys.discard(content_factory.key)
                    yield content_factory
                return
            except errors.RetryWithNewPacks as e:
                self._access.reload_or_raise(e)

    def _get_remaining_record_stream(self, keys, ordering,
                                     include_delta_closure):
        # map from key to
        # (record_details, access_memo, compression_parent_key)
        positions = dict((key, self._build_details_to_components(details))
                         for key, details in viewitems(build_details))
        absent_keys = keys.difference(set(positions))
        # There may be more absent keys: if we're missing the basis component
        # and are trying to include the delta closure.

        missing = set(keys)
        record_map = self._get_record_map(missing, allow_missing=True)
        for key, details in viewitems(record_map):
            if key not in missing:
                continue
            # record entry 2 is the 'digest'.

            # self is not annotated, but we can strip annotations cheaply.
            convertibles = {"knit-annotated-ft-gz"}
            if self._max_delta_chain:
                delta_types.add("knit-annotated-delta-gz")
                convertibles.add("knit-annotated-delta-gz")

            elif ((record.storage_kind in knit_types)
                  and (compression_parent is None
                       or not self._immediate_fallback_vfs
                       or compression_parent in self._index
                       or compression_parent not in self)):
                # we can insert the knit record literally if either it has no
                # compression parent OR we already have its basis in this kvf
                # OR the basis is not present even in the fallbacks. In the
                # last case it will either turn up in the stream and all
                # will be well, or it won't turn up at all and we'll raise an
                # error at the end.
                #
                # TODO: self.__contains__ is somewhat redundant with
                # self._index.__contains__; we really want something that directly
                # asks if it's only present in the fallbacks. -- mbp 20081119
                if record.storage_kind not in native_types:

                    # They're required to be physically in this
                    # KnitVersionedFiles, not in a fallback.
                    if compression_parent not in self._index:
                        pending = buffered_index_entries.setdefault(
                            compression_parent, [])
                        pending.append(index_entry)

                # we need key, position, length
                key_records = []
                build_details = self._index.get_build_details(keys)
                for key, details in viewitems(build_details):
                    if key in keys:
                        key_records.append((key, details[0]))
                records_iter = enumerate(self._read_records_iter(key_records))

                    for line in line_iterator:
                        yield line, key
            except errors.RetryWithNewPacks as e:
                self._access.reload_or_raise(e)
        # If there are still keys we've not yet found, we look in the fallback
        # vfs, and hope to find them there. Note that if the keys are found

        :return: the header and the decompressor stream.
            as (stream, header_record)
        """
        df = gzip.GzipFile(mode='rb', fileobj=BytesIO(raw_data))
        try:
            # Current serialise
            rec = self._check_header(key, df.readline())
        except Exception as e:
            raise KnitCorrupt(self,
                "While reading {%s} got %s(%s)"
                % (key, e.__class__.__name__, str(e)))

        # 4168 calls in 2880 217 internal
        # 4168 calls to _parse_record_header in 2121
        # 4168 calls to readlines in 330
        with gzip.GzipFile(mode='rb', fileobj=BytesIO(data)) as df:
            try:
                record_contents = df.readlines()
            except Exception as e:
                raise KnitCorrupt(self,
                    "Corrupt compressed record %r, got %s(%s)" %
                    (data, e.__class__.__name__, str(e)))
            header = record_contents.pop(0)
            rec = self._split_header(header)
            last_line = record_contents.pop()
            if len(record_contents) != int(rec[2]):
                raise KnitCorrupt(self,
                    'incorrect number of lines %s != %s'
                    ' for version {%s} %s'
                    % (len(record_contents), int(rec[2]),
                       rec[1], record_contents))
            if last_line != b'end %s\n' % rec[1]:
                raise KnitCorrupt(self,
                    'unexpected version end line %r, wanted %r'
                    % (last_line, rec[1]))
        return rec, record_contents

    def _read_records_iter(self, records):

        raw_data = self._access.get_raw_records(
            [index_memo for key, index_memo in needed_records])
        for (key, index_memo), data in zip(needed_records, raw_data):
            content, digest = self._parse_record(key[-1], data)
            yield key, content, digest

        raw_records = self._access.get_raw_records(needed_offsets)
        for key, index_memo in records:
            data = next(raw_records)
            yield key, data

    def _record_to_data(self, key, digest, lines, dense_lines=None):
        """Convert key, digest, lines into a disk-format record.

        :param dense_lines: The bytes of lines but in a denser form. For
            instance, if lines is a list of 1000 bytestrings each ending in
            \\n, dense_lines may be a list with one line in it, containing all
            the 1000's lines and their \\n's. Using dense_lines if it is
            already known is a win because the string join to create bytes in
            this function spends less time resizing the final string.
        :return: (len, the compressed record as bytes.)
        """
        chunks = [b"version %s %d %s\n" % (key[-1], len(lines), digest)]
        chunks.extend(dense_lines or lines)
        chunks.append(b"end " + key[-1] + b"\n")
        for chunk in chunks:
            if not isinstance(chunk, bytes):
                raise AssertionError(
                    'data must be plain bytes was %s' % type(chunk))
        if lines and not lines[-1].endswith(b'\n'):
            raise ValueError('corrupt lines value %r' % lines)
        compressed_bytes = b''.join(tuned_gzip.chunks_to_gzip(chunks))
        return len(compressed_bytes), compressed_bytes
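
    # Editor's sketch (inferred from the chunks assembled above): before gzip
    # compression a knit record is laid out as
    #
    #   version <key[-1]> <line-count> <sha1-digest>
    #   <content lines>
    #   end <key[-1]>
    #
    # which is exactly what _parse_record_unchecked above takes apart again.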

    def _split_header(self, line):

        # Note that _get_content is only called when the _ContentMapGenerator
        # has been constructed with just one key requested for reconstruction.
        if key in self.nonlocal_keys:
            record = next(self.get_record_stream())
            # Create a content object on the fly
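            # Editor's note (added): 'chunked' yields a list of byte chunks
            # with no guaranteed line boundaries; chunks_to_lines below
            # re-splits them into '\n'-terminated lines for the content
            # object.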
            lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
            return PlainKnitContent(lines, record.key)

        # one line with next ('' for None)
        # one line with byte count of the record bytes
        # the record bytes
        for key, (record_bytes, (method, noeol), next) in viewitems(
                self._raw_record_map):
            key_bytes = '\x00'.join(key)
            parents = self.global_map.get(key, None)
            if parents is None:

    ABI change with the C extension that reads .kndx files.
    """

    HEADER = b"# bzr knit index 8\n"

    def __init__(self, transport, mapper, get_scope, allow_writes, is_locked):
        """Create a _KndxIndex on transport using mapper."""

                line = "\n%s %s %s %s %s :" % (
                    key[-1], ','.join(options), pos, size,
                    self._dictionary_compress(parents))
                if not isinstance(line, str):
                    raise AssertionError(
                        'data must be utf8 was %s' % type(line))
                lines.append(line)
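                # Editor's note (format inferred from the line built above):
                # each .kndx entry is serialised as
                #   '<version-id> <options,...> <pos> <size> <parents> :'
                # where the leading '\n' separates it from the previous entry
                # and the trailing ':' marks the line as completely written.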

        elif 'line-delta' in options:
            return 'line-delta'
        else:
            raise KnitIndexUnknownMethod(self, options)

    def get_options(self, key):
        """Return a list representing options.

        entry = self._kndx_cache[prefix][0][suffix]
        return key, entry[2], entry[3]

    __contains__ = _mod_index._has_key_from_parent_map

    def _init_index(self, path, extra_lines=[]):
        """Initialize an index."""
        sio = BytesIO()
        sio.write(self.HEADER)
        sio.writelines(extra_lines)

        # Identify all key prefixes.
        # XXX: A bit hacky, needs polish.
        if isinstance(self._mapper, ConstantMapper):
            prefixes = [()]
        else:
            relpaths = set()

                del self._history
            except NoSuchFile:
                self._kndx_cache[prefix] = ({}, [])
                if isinstance(self._mapper, ConstantMapper):
                    # preserve behaviour for revisions.kndx etc.
                    self._init_index(path)
                del self._cache

                 add_callback=None, track_external_parent_refs=False):
        """Construct a KnitGraphIndex on a graph_index.

        :param graph_index: An implementation of breezy.index.GraphIndex.
        :param is_locked: A callback to check whether the object should answer
            requests.
        :param deltas: Allow delta-compressed records.

        if self._parents:
            for key, (value, node_refs) in viewitems(keys):
                result.append((key, value, node_refs))
        else:
            for key, (value, node_refs) in viewitems(keys):
                result.append((key, value))
        self._add_callback(result)
        if missing_compression_parents:

        node = self._get_node(key)
        return self._node_to_position(node)

    __contains__ = _mod_index._has_key_from_parent_map

    def keys(self):
        """Get all the keys in the collection.

            opaque index memo. For _KnitKeyAccess the memo is (key, pos,
            length), where the key is the record key.
        """
3217
if not isinstance(raw_data, bytes):
3158
if type(raw_data) is not str:
3218
3159
raise AssertionError(
3219
3160
'data must be plain bytes was %s' % type(raw_data))

            self._all_build_details.update(build_details)
            # new_nodes = self._vf._index._get_entries(this_iteration)
            pending = set()
            for key, details in viewitems(build_details):
                (index_memo, compression_parent, parent_keys,
                 record_details) = details
                self._parent_map[key] = parent_keys

                    self._num_compression_children[compression_parent] = 1

            missing_versions = this_iteration.difference(build_details)
            if missing_versions:
                for key in missing_versions:
                    if key in self._parent_map and key in self._text_cache:

                        num_lines = len(text)  # bad assumption
                        yield sub_key, text, num_lines

            except errors.RetryWithNewPacks as e:
                self._vf._access.reload_or_raise(e)
                # The cached build_details are no longer valid
                self._all_build_details.clear()

                to_process.extend(self._process_pending(key))
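
# Editor's note (added): the block below follows the usual breezy pattern of
# preferring a compiled (pyx) extension and falling back to the pure-Python
# implementation, recording the failure via osutils.failed_to_load_extension.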
try:
    from ._knit_load_data_pyx import _load_data_c as _load_data
except ImportError as e:
    osutils.failed_to_load_extension(e)
    from ._knit_load_data_py import _load_data_py as _load_data