def _get_transport_modules():
    """Return a list of the modules providing transports."""
    modules = set()
    for prefix, factory_list in transport_list_registry.items():
        for factory in factory_list:
            if hasattr(factory, "_module_name"):
                modules.add(factory._module_name)
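    # For instance (illustrative values only), once the registrations at the
    # bottom of this module have run, the collected module names include
    # entries such as 'bzrlib.transport.http._urllib' and
    # 'bzrlib.transport.memory'.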
class TransportListRegistry(registry.Registry):
    """A registry which simplifies tracking available Transports.

    A registration of a new protocol requires two steps:
    1) register the prefix with the function register_transport( )
    2) register the protocol provider with the function
       register_transport_provider( ) (and the "lazy" variant)
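
    For example, a hypothetical 'foo://' protocol (the prefix and the
    FooTransport class below are illustrative names, not part of bzrlib)
    would be set up with the module-level helpers used later in this file:

        register_transport_proto('foo://', help="Access using foo.")
        register_lazy_transport('foo://', 'bzrlib.plugins.foo.transport',
                                'FooTransport')
    """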
    _bytes_to_read_before_seek = 0

    def __init__(self, base):
        super(Transport, self).__init__()
        self.base = base

    def _translate_error(self, e, path, raise_generic=True):
            # Now that we've read some data, see if we can yield anything back
            while cur_offset_and_size in data_map:
                this_data = data_map.pop(cur_offset_and_size)
                this_offset = cur_offset_and_size[0]
                try:
                    cur_offset_and_size = offset_stack.next()
                except StopIteration:
                    # Close the file handle as there will be no more data
                    # The handle would normally be cleaned up as this code goes
                    # out of scope, but as we are a generator, not all code
                    # will re-enter once we have consumed all the expected
                    # data. For example:
                    #   zip(range(len(requests)), readv(foo, requests))
                    # Will stop because the range is done, and not run the
                    # cleanup code for the readv().
                    fp.close()
                    cur_offset_and_size = None
                yield this_offset, this_data
    def _sort_expand_and_combine(self, offsets, upper_limit):
        """Helper for readv.

        With a long list of neighboring requests, combine them
        into a single large request, while retaining the original
        order. Turns [(15, 10), (25, 10)] => [(15, 20, [(0, 10), (10, 10)])]

        Note that overlapping requests are not permitted. (So [(15, 10), (20,
        10)] will raise a ValueError.) This is because the data we access never
        overlaps, and it allows callers to trust that we only need any byte of
        data for 1 request (so nothing needs to be buffered to fulfill a second
        request.)

        :param offsets: A list of (start, length) pairs
        :param limit: Only combine a maximum of this many pairs. Some
            transports penalize multiple reads more than others, and sometimes
            it is better to return early.
        :param fudge_factor: All transports have some level of 'it is
            better to read some more data and throw it away rather
            than seek', so collapse if we are 'close enough'
        :param max_size: Create coalesced offsets no bigger than this size.
            When a single offset is bigger than 'max_size', it will keep
            its size and be alone in the coalesced offset.
            0 means no maximum size.
        :return: a list of _CoalescedOffset objects, which have members
            for where to start, how much to read, and how to split those chunks
        """
        last_end = None
        cur = _CoalescedOffset(None, None, [])
        coalesced_offsets = []

        if max_size <= 0:
            # 'unlimited', but we actually take this to mean 100MB buffer limit
            max_size = 100*1024*1024

        for start, size in offsets:
            end = start + size
            if (last_end is not None
                and start <= last_end + fudge_factor
                and start >= cur.start
                and (limit <= 0 or len(cur.ranges) < limit)
                and (max_size <= 0 or end - cur.start <= max_size)):
                if start < last_end:
                    raise ValueError('Overlapping range not allowed:'
                        ' last range ended at %s, new one starts at %s'
                        % (last_end, start))
                cur.length = end - cur.start
                cur.ranges.append((start-cur.start, size))
            else:
                if cur.start is not None:
                    coalesced_offsets.append(cur)
                cur = _CoalescedOffset(start, size, [(0, size)])
            last_end = end

        if cur.start is not None:
            coalesced_offsets.append(cur)
        return coalesced_offsets
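    # For example, per the docstring above, coalescing [(15, 10), (25, 10)]
    # with no fudge factor produces a single coalesced offset equivalent to
    # _CoalescedOffset(15, 20, [(0, 10), (10, 10)]): one 20-byte read starting
    # at offset 15, split back into the two original 10-byte requests.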
    def get_multi(self, relpaths, pb=None):
        """Get a list of file-like objects, one for each entry in relpaths."""
    def copy_tree(self, from_relpath, to_relpath):
        source = self.clone(from_relpath)
        target = self.clone(to_relpath)
        source.copy_tree_to_transport(target)

    def copy_tree_to_transport(self, to_transport):
        """Copy a subtree from one transport to another.

        self.base is used as the source tree root, and to_transport.base
        is used as the target.  to_transport.base must exist (and be a
        directory).
        """
        directories = ['.']
        while directories:
            dir = directories.pop()
            files = []
            to_transport.mkdir(dir)
            for path in self.list_dir(dir):
                path = dir + '/' + path
                stat = self.stat(path)
                if S_ISDIR(stat.st_mode):
                    directories.append(path)
                else:
                    files.append(path)
            self.copy_to(files, to_transport)
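    # A typical use, sketched with placeholder URLs (get_transport is the
    # module-level factory defined in this file):
    #
    #   source = get_transport('file:///srv/src')
    #   target = get_transport('file:///srv/dest')   # must already exist
    #   source.copy_tree_to_transport(target)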
    def rename(self, rel_from, rel_to):
        """Rename a file or directory."""

    # should be asked to ConnectedTransport only.

    def _redirected_to(self, source, target):
        """Returns a transport suitable to re-issue a redirected request.

        :param source: The source url as returned by the server.
        :param target: The target url as returned by the server.

        The redirection can be handled only if the relpath involved is not
        renamed by the redirection.

        :returns: A transport or None.
        """
        # This returns None by default, meaning the transport can't handle the
        # redirection.
        return None
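        # For instance (illustrative URLs): a redirect from
        # http://example.com/branch to https://example.com/branch leaves the
        # relpath intact, so a subclass could return a transport for the new
        # location, whereas a redirect that renames the path cannot be mapped
        # back and should result in None.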
class _SharedConnection(object):
    """A connection shared between several transports."""
                    possible_transports.append(t_same_connection)
                return t_same_connection

    for proto, factory_list in transport_list_registry.items():
        if proto is not None and base.startswith(proto):
            transport, last_err = _try_transport_factories(base, factory_list)
register_transport_proto('http://',
            help="Read-only access of branches exported on the web.")
register_transport_proto('https://',
            help="Read-only access of branches exported on the web using SSL.")
# The default http implementation is urllib, but https is pycurl if available
register_lazy_transport('http://', 'bzrlib.transport.http._pycurl',
                        'PyCurlTransport')
register_lazy_transport('http://', 'bzrlib.transport.http._urllib',
                        'HttpTransport_urllib')
register_lazy_transport('https://', 'bzrlib.transport.http._urllib',
                        'HttpTransport_urllib')
register_lazy_transport('https://', 'bzrlib.transport.http._pycurl',
                        'PyCurlTransport')
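# (Providers registered later for the same prefix are tried first, so with the
# ordering above urllib is the default for http:// while pycurl, when it is
# importable, is preferred for https://.)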
register_transport_proto('aftp://', help="Access using active FTP.")
register_lazy_transport('aftp://', 'bzrlib.transport.ftp', 'FtpTransport')

# Default to trying GSSAPI authentication (if the kerberos module is available)
register_transport_proto('ftp+gssapi://', register_netloc=True)
register_lazy_transport('ftp+gssapi://', 'bzrlib.transport.ftp._gssapi',
                        'GSSAPIFtpTransport')
register_transport_proto('aftp+gssapi://', register_netloc=True)
register_lazy_transport('aftp+gssapi://', 'bzrlib.transport.ftp._gssapi',
                        'GSSAPIFtpTransport')
register_transport_proto('ftp+nogssapi://', register_netloc=True)
register_transport_proto('aftp+nogssapi://', register_netloc=True)
register_lazy_transport('ftp://', 'bzrlib.transport.ftp._gssapi',
                        'GSSAPIFtpTransport')
register_lazy_transport('aftp://', 'bzrlib.transport.ftp._gssapi',
                        'GSSAPIFtpTransport')
register_lazy_transport('ftp+nogssapi://', 'bzrlib.transport.ftp',
                        'FtpTransport')
register_lazy_transport('aftp+nogssapi://', 'bzrlib.transport.ftp',
                        'FtpTransport')
register_transport_proto('memory://')
register_lazy_transport('memory://', 'bzrlib.transport.memory',
                        'MemoryTransport')

register_lazy_transport('fakenfs+', 'bzrlib.transport.fakenfs',
                        'FakeNFSTransportDecorator')

register_transport_proto('log+')
register_lazy_transport('log+', 'bzrlib.transport.log', 'TransportLogDecorator')

register_transport_proto('trace+')
register_lazy_transport('trace+', 'bzrlib.transport.trace',
                        'TransportTraceDecorator')