from cStringIO import StringIO
from collections import deque
from copy import deepcopy
import re
import urllib
import urlparse
import warnings

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from stat import S_ISDIR
from unittest import TestSuite

from bzrlib import (
    errors,
    urlutils,
    )
from bzrlib.errors import DependencyNotPresent
from bzrlib.osutils import pumpfile
""")

from bzrlib import registry
from bzrlib.symbol_versioning import (
    deprecated_function,
    deprecated_method,
    deprecated_passed,
    one_four,
    DEPRECATED_PARAMETER,
    )
from bzrlib.trace import mutter, warning

# Transport providers are registered onto each prefix's list LIFO and tried in
# order; as a result transports provided by plugins are tried first, which is
# usually what we want.

def _get_protocol_handlers():
    """Return a dictionary of {urlprefix: [factory]}"""
    return transport_list_registry

def _set_protocol_handlers(new_handlers):
    """Replace the current protocol handlers.

    WARNING this will remove all built-in protocols. Use with care.
    """
    global transport_list_registry
    transport_list_registry = new_handlers

def _clear_protocol_handlers():
    global transport_list_registry
    transport_list_registry = TransportListRegistry()

def _get_transport_modules():
    """Return a list of the modules providing transports."""
    modules = set()
    for prefix, factory_list in transport_list_registry.iteritems():
        for factory in factory_list:
            if hasattr(factory, "_module_name"):
                modules.add(factory._module_name)
            else:
                modules.add(factory._obj.__module__)
    # Add chroot directly, because there is no handler registered for it.
    modules.add('bzrlib.transport.chroot')
    result = list(modules)
    result.sort()
    return result

class TransportListRegistry(registry.Registry):
    """A registry which simplifies tracking available Transports.

    A registration of a new protocol requires two steps:
    1) register the prefix with the function register_transport( )
    2) register the protocol provider with the function
       register_transport_provider( ) ( and the "lazy" variant )

    This is needed because:
    a) a single provider can support multiple protocols ( like the ftp
       provider which supports both the ftp:// and the aftp:// protocols )
    b) a single protocol can have multiple providers ( like the http://
       protocol which is supported by both the urllib and pycurl providers )
    """

    def register_transport_provider(self, key, obj):
        self.get(key).insert(0, registry._ObjectGetter(obj))

    def register_lazy_transport_provider(self, key, module_name, member_name):
        self.get(key).insert(0,
                registry._LazyObjectGetter(module_name, member_name))

    def register_transport(self, key, help=None):
        self.register(key, [], help)

    def set_default_transport(self, key=None):
        """Record 'key' as the default transport prefix (None clears it)."""
        self._default_key = key


transport_list_registry = TransportListRegistry()

def register_transport_proto(prefix, help=None, info=None,
                             register_netloc=False):
    transport_list_registry.register_transport(prefix, help)
    if register_netloc:
        if not prefix.endswith('://'):
            raise ValueError(prefix)
        register_urlparse_netloc_protocol(prefix[:-3])


def register_lazy_transport(prefix, module, classname):
    """Register lazy-loaded transport class.

    When opening a URL with the given prefix, load the module and then
    instantiate the particular class.

    If the module raises DependencyNotPresent when it's imported, it is
    skipped and another implementation of the protocol is tried. This is
    intended to be used when the implementation depends on an external
    implementation that may not be present. If any other error is raised, it
    propagates up and the attempt to open the url fails.
    """
    # TODO: If no implementation of a protocol is available because of missing
    # dependencies, we should perhaps show the message about what dependency
    # was missing.
    if not prefix in transport_list_registry:
        register_transport_proto(prefix)
    transport_list_registry.register_lazy_transport_provider(prefix, module,
                                                             classname)


def register_transport(prefix, klass, override=DEPRECATED_PARAMETER):
    """Register a transport that can be used to open URLs.

    Normally you should use register_lazy_transport, which defers loading the
    implementation until it's actually used, and so avoids pulling in possibly
    large implementation libraries.
    """
    # Note that this code runs very early in library setup -- trace may not be
    # configured yet.
    if deprecated_passed(override):
        warnings.warn("register_transport(override) is deprecated")
    if not prefix in transport_list_registry:
        register_transport_proto(prefix)
    transport_list_registry.register_transport_provider(prefix, klass)
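
# Example of how a new protocol is typically wired in (an illustrative sketch;
# the 'xyz://' prefix and the bzrlib.plugins.xyz module named below are
# hypothetical):
#
#   register_transport_proto('xyz://',
#       help="Access using the hypothetical xyz protocol.")
#   register_lazy_transport('xyz://', 'bzrlib.plugins.xyz.transport',
#                           'XYZTransport')
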
def register_urlparse_netloc_protocol(protocol):
    """Ensure that protocol is set up to be used with urlparse netloc parsing."""
    if protocol not in urlparse.uses_netloc:
        urlparse.uses_netloc.append(protocol)

def _unregister_urlparse_netloc_protocol(protocol):
    """Remove protocol from urlparse netloc parsing.

    Except for tests, you should never use this function. Using it with 'http',
    for example, will break all http transports.
    """
    if protocol in urlparse.uses_netloc:
        urlparse.uses_netloc.remove(protocol)

def unregister_transport(scheme, factory):
    """Unregister a transport."""
    l = transport_list_registry.get(scheme)
    for i in l:
        if i.get_obj() == factory:
            transport_list_registry.get(scheme).remove(i)
            break
    if len(l) == 0:
        transport_list_registry.remove(scheme)

class _CoalescedOffset(object):
    """A data container for keeping track of coalesced offsets."""


class Transport(object):
    """This class encapsulates methods for retrieving or putting a file
    from/to a storage location.
    """

    def get_bytes(self, relpath):
        """Get the byte content of the file at the given relative path."""
        return self.get(relpath).read()

    @deprecated_method(one_four)
    def get_smart_client(self):
        """Return a smart client for this transport if possible.

        A smart client doesn't imply the presence of a smart server: it implies
        that the smart protocol can be tunnelled via this transport.

        :raises NoSmartServer: if no smart server client is available.
        """
        raise errors.NoSmartServer(self.base)

    def get_smart_medium(self):
        """Return a smart client medium for this transport if possible.

        A smart medium doesn't imply the presence of a smart server: it implies
        that the smart protocol can be tunnelled via this transport.

        :raises NoSmartMedium: if no smart server medium is available.
        """
        raise errors.NoSmartMedium(self)

    @deprecated_method(one_four)
    def get_shared_medium(self):
        """Return a smart client shared medium for this transport if possible.

        A smart medium doesn't imply the presence of a smart server: it implies
        that the smart protocol can be tunnelled via this transport.

        :raises NoSmartMedium: if no smart server medium is available.
        """
        raise errors.NoSmartMedium(self)
    def readv(self, relpath, offsets, adjust_for_latency=False,
              upper_limit=None):
        """Get parts of the file at the given relative path.

        :param relpath: The path to read data from.
        :param offsets: A list of (offset, size) tuples.
        :param adjust_for_latency: Adjust the requested offsets to accommodate
            transport latency. This may re-order the offsets, expand them to
            grab adjacent data when there is likely a high cost to requesting
            data relative to delivering it.
        :param upper_limit: When adjust_for_latency is True setting upper_limit
            allows the caller to tell the transport about the length of the
            file, so that requests are not issued for ranges beyond the end of
            the file. This matters because some servers and/or transports error
            in such a case rather than just satisfying the available ranges.
            upper_limit should always be provided when adjust_for_latency is
            True, and should be the size of the file in bytes.
        :return: A list or generator of (offset, data) tuples
        """
        if adjust_for_latency:
            # Design note: We may wish to have different algorithms for the
            # expansion of the offsets per-transport. E.g. for local disk to
            # use page-aligned expansion. If that is the case consider the
            # following structure:
            # - a test that transport.readv uses self._offset_expander or some
            #   similar attribute, to do the expansion
            # - a test for each transport that it has some known-good offset
            #   expander
            # - unit tests for each offset expander
            # - a set of tests for the offset expander interface, giving
            #   baseline behaviour (which the current transport
            #   adjust_for_latency tests could be repurposed to).
            offsets = self._sort_expand_and_combine(offsets, upper_limit)
        return self._readv(relpath, offsets)

    def _readv(self, relpath, offsets):
        """Get parts of the file at the given relative path.

        :param relpath: The path to read.
        :param offsets: A list of (offset, size) tuples.
        :return: A list or generator of (offset, data) tuples
        """
        fp = self.get(relpath)
        return self._seek_and_read(fp, offsets, relpath)

    def _seek_and_read(self, fp, offsets, relpath='<unknown>'):
        """An implementation of readv that uses fp.seek and fp.read.

        This uses _coalesce_offsets to issue larger reads and fewer seeks.
        """
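
    # Example (an illustrative sketch): read two byte ranges of a file in a
    # single call, which lets the transport coalesce them into one request.
    # 't' is any Transport instance and 'process' is a hypothetical callback:
    #
    #   for offset, data in t.readv('inventory', [(0, 10), (100, 20)]):
    #       process(offset, data)
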
    def _sort_expand_and_combine(self, offsets, upper_limit):
        """Helper for readv.

        :param offsets: A readv vector - (offset, length) tuples.
        :param upper_limit: The highest byte offset that may be requested.
        :return: A readv vector that will read all the regions requested by
            offsets, in start-to-end order, with no duplicated regions,
            expanded by the transports recommended page size.
        """
        offsets = sorted(offsets)
        # short circuit empty requests
        if len(offsets) == 0:
            def empty_yielder():
                # Quick thunk to stop this function becoming a generator
                # itself, rather we return a generator that has nothing to
                # yield.
                if False:
                    yield None
            return empty_yielder()
        # expand by page size at either end
        maximum_expansion = self.recommended_page_size()
        new_offsets = []
        for offset, length in offsets:
            expansion = maximum_expansion - length
            if expansion < 0:
                # we're asking for more than the minimum read anyway.
                expansion = 0
            reduction = expansion / 2
            new_offset = offset - reduction
            new_length = length + expansion
            if new_offset < 0:
                # don't ask for anything < 0
                new_offset = 0
            if (upper_limit is not None and
                new_offset + new_length > upper_limit):
                new_length = upper_limit - new_offset
            new_offsets.append((new_offset, new_length))
        # combine the expanded offsets
        offsets = []
        current_offset, current_length = new_offsets[0]
        current_finish = current_length + current_offset
        for offset, length in new_offsets[1:]:
            finish = offset + length
            if offset > current_finish:
                # there is a gap, output the current accumulator and start
                # a new one for the region we're examining.
                offsets.append((current_offset, current_length))
                current_offset = offset
                current_length = length
                current_finish = finish
                continue
            if finish > current_finish:
                # extend the current accumulator to the end of the region
                # we're examining.
                current_finish = finish
                current_length = finish - current_offset
        offsets.append((current_offset, current_length))
        return offsets
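
    # Worked example (illustrative, assuming recommended_page_size() == 4096):
    # a request for [(0, 100), (4000, 200)] is first expanded towards the page
    # size, giving [(0, 4096), (2052, 4096)]; the expanded regions overlap, so
    # they are combined into a single read of [(0, 6148)].
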
    @staticmethod
    def _coalesce_offsets(offsets, limit=0, fudge_factor=0, max_size=0):
        """Yield coalesced offsets.

        With a long list of neighboring requests, combine them
        into fewer, larger read requests.
        """
    def _reuse_for(self, other_base):
        # This is really needed for ConnectedTransport only, but it's easier to
        # have Transport refuse to be reused than to test that the reuse
        # should be asked of ConnectedTransport only.
        return None


# jam 20060426 For compatibility we copy the functions here
# TODO: These should be marked as deprecated
urlescape = urlutils.escape
urlunescape = urlutils.unescape

class _SharedConnection(object):
    """A connection shared between several transports."""

    def __init__(self, connection=None, credentials=None, base=None):
        """Constructor.

        :param connection: An opaque object specific to each transport.

        :param credentials: An opaque object containing the credentials used to
            create the connection.
        """
        self.connection = connection
        self.credentials = credentials
        self.base = base

class ConnectedTransport(Transport):
1239
"""A transport connected to a remote server.
1241
This class provide the basis to implement transports that need to connect
1244
Host and credentials are available as private attributes, cloning preserves
1245
them and share the underlying, protocol specific, connection.
1248
def __init__(self, base, _from_transport=None):
1251
The caller should ensure that _from_transport points at the same host
1254
:param base: transport root URL
1256
:param _from_transport: optional transport to build from. The built
1257
transport will share the connection with this transport.
1259
if not base.endswith('/'):
1262
self._user, self._password,
1263
self._host, self._port,
1264
self._path) = self._split_url(base)
1265
if _from_transport is not None:
1266
# Copy the password as it does not appear in base and will be lost
1267
# otherwise. It can appear in the _split_url above if the user
1268
# provided it on the command line. Otherwise, daughter classes will
1269
# prompt the user for one when appropriate.
1270
self._password = _from_transport._password
1272
base = self._unsplit_url(self._scheme,
1273
self._user, self._password,
1274
self._host, self._port,
1277
super(ConnectedTransport, self).__init__(base)
1278
if _from_transport is None:
1279
self._shared_connection = _SharedConnection()
1281
self._shared_connection = _from_transport._shared_connection
1283
def clone(self, offset=None):
1284
"""Return a new transport with root at self.base + offset
1286
We leave the daughter classes take advantage of the hint
1287
that it's a cloning not a raw creation.
1290
return self.__class__(self.base, _from_transport=self)
1292
return self.__class__(self.abspath(offset), _from_transport=self)
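
    # Illustrative example: cloning rebases the URL but keeps sharing the
    # underlying connection (the URL below is hypothetical):
    #
    #   t = get_transport('sftp://host.example.com/srv/repo')
    #   sub = t.clone('branches')
    #   # sub.base == 'sftp://host.example.com/srv/repo/branches/'
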
    @staticmethod
    def _split_url(url):
        """Extract the server address, the credentials and the path from the url.

        user, password, host and path should be quoted if they contain reserved
        chars.

        :param url: a quoted url

        :return: (scheme, user, password, host, port, path) tuple, all fields
            are unquoted.
        """
        if isinstance(url, unicode):
            raise errors.InvalidURL('should be ascii:\n%r' % url)
        url = url.encode('utf-8')
        (scheme, netloc, path, params,
         query, fragment) = urlparse.urlparse(url, allow_fragments=False)
        user = password = host = port = None
        if '@' in netloc:
            user, host = netloc.rsplit('@', 1)
            if ':' in user:
                user, password = user.split(':', 1)
                password = urllib.unquote(password)
            user = urllib.unquote(user)
        else:
            host = netloc
        if ':' in host:
            host, port = host.rsplit(':', 1)
            try:
                port = int(port)
            except ValueError:
                raise errors.InvalidURL('invalid port number %s in url:\n%s' %
                                        (port, url))
        if host == '':
            raise errors.InvalidURL('Host empty in: %s' % url)

        host = urllib.unquote(host)
        path = urllib.unquote(path)

        return (scheme, user, password, host, port, path)

    @staticmethod
    def _unsplit_url(scheme, user, password, host, port, path):
        """Build the full URL for the given already URL encoded path.

        user, password, host and path will be quoted if they contain reserved
        chars.

        :param scheme: protocol

        :param user: login

        :param password: associated password

        :param host: the server address

        :param port: the associated port

        :param path: the absolute path on the server

        :return: The corresponding URL.
        """
        netloc = urllib.quote(host)
        if user is not None:
            # Note that we don't put the password back even if we
            # have one so that it doesn't get accidentally
            # exposed.
            netloc = '%s@%s' % (urllib.quote(user), netloc)
        if port is not None:
            netloc = '%s:%d' % (netloc, port)
        path = urllib.quote(path)
        return urlparse.urlunparse((scheme, netloc, path, None, None, None))
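
    # Illustrative round trip (hypothetical values): splitting
    # 'ftp://joe:secret@host.example.com:2121/path' yields
    # ('ftp', 'joe', 'secret', 'host.example.com', 2121, '/path'), and
    # _unsplit_url() on those fields rebuilds
    # 'ftp://joe@host.example.com:2121/path' -- without the password, as noted
    # in the comment above.
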
    def relpath(self, abspath):
        """Return the local path portion from a given absolute path."""
        scheme, user, password, host, port, path = self._split_url(abspath)
        error = []
        if (scheme != self._scheme):
            error.append('scheme mismatch')
        if (user != self._user):
            error.append('user name mismatch')
        if (host != self._host):
            error.append('host mismatch')
        if (port != self._port):
            error.append('port mismatch')
        if not (path == self._path[:-1] or path.startswith(self._path)):
            error.append('path mismatch')
        if error:
            extra = ', '.join(error)
            raise errors.PathNotChild(abspath, self.base, extra=extra)
        pl = len(self._path)
        return path[pl:].strip('/')

    def abspath(self, relpath):
        """Return the full url to the given relative path.

        :param relpath: the relative path urlencoded

        :returns: the Unicode version of the absolute path for relpath.
        """
        relative = urlutils.unescape(relpath).encode('utf-8')
        path = self._combine_paths(self._path, relative)
        return self._unsplit_url(self._scheme, self._user, self._password,
                                 self._host, self._port,
                                 path)
def _remote_path(self, relpath):
1404
"""Return the absolute path part of the url to the given relative path.
1406
This is the path that the remote server expect to receive in the
1407
requests, daughter classes should redefine this method if needed and
1408
use the result to build their requests.
1410
:param relpath: the path relative to the transport base urlencoded.
1412
:return: the absolute Unicode path on the server,
1414
relative = urlutils.unescape(relpath).encode('utf-8')
1415
remote_path = self._combine_paths(self._path, relative)
1418
def _get_shared_connection(self):
1419
"""Get the object shared amongst cloned transports.
1421
This should be used only by classes that needs to extend the sharing
1422
with objects other than transports.
1424
Use _get_connection to get the connection itself.
1426
return self._shared_connection
1428
def _set_connection(self, connection, credentials=None):
1429
"""Record a newly created connection with its associated credentials.
1431
Note: To ensure that connection is still shared after a temporary
1432
failure and a new one needs to be created, daughter classes should
1433
always call this method to set the connection and do so each time a new
1434
connection is created.
1436
:param connection: An opaque object representing the connection used by
1439
:param credentials: An opaque object representing the credentials
1440
needed to create the connection.
1442
self._shared_connection.connection = connection
1443
self._shared_connection.credentials = credentials
1445
def _get_connection(self):
1446
"""Returns the transport specific connection object."""
1447
return self._shared_connection.connection
1449
def _get_credentials(self):
1450
"""Returns the credentials used to establish the connection."""
1451
return self._shared_connection.credentials
1453
def _update_credentials(self, credentials):
1454
"""Update the credentials of the current connection.
1456
Some protocols can renegociate the credentials within a connection,
1457
this method allows daughter classes to share updated credentials.
1459
:param credentials: the updated credentials.
1461
# We don't want to call _set_connection here as we are only updating
1462
# the credentials not creating a new connection.
1463
self._shared_connection.credentials = credentials
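
    # Sketch of how a daughter class would typically use the helpers above
    # when (re)connecting ('_create_connection' is a hypothetical method):
    #
    #   connection, credentials = self._create_connection(credentials)
    #   self._set_connection(connection, credentials)
    #   ...
    #   connection = self._get_connection()
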
    def _reuse_for(self, other_base):
        """Returns a transport sharing the same connection if possible.

        Note: we share the connection if the expected credentials are the
        same: (host, port, user). Some protocols may disagree and redefine the
        criteria in daughter classes.

        Note: we don't compare the passwords here because other_base may have
        been obtained from an existing transport.base which does not mention
        the password.

        :param other_base: the URL we want to share the connection with.

        :return: A new transport or None if the connection cannot be shared.
        """
        try:
            (scheme, user, password,
             host, port, path) = self._split_url(other_base)
        except errors.InvalidURL:
            # No hope in trying to reuse an existing transport for an invalid
            # URL, so refuse to share the connection.
            return None

        transport = None
        # Don't compare passwords, they may be absent from other_base or from
        # self and they don't carry more information than user anyway.
        if (scheme == self._scheme
            and user == self._user
            and host == self._host
            and port == self._port):
            if not path.endswith('/'):
                # This normally occurs at __init__ time, but it's easier to do
                # it now to avoid creating two transports for the same base.
                path += '/'
            if self._path == path:
                # shortcut, it's really the same transport
                return self
            # We don't call clone here because the intent is different: we
            # build a new transport on a different base (which may be totally
            # unrelated) but we share the connection.
            transport = self.__class__(other_base, _from_transport=self)
        return transport
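
    # Illustrative example (hypothetical URLs): two branches on the same host,
    # port and user can share a single connection:
    #
    #   t1 = get_transport('sftp://host.example.com/srv/branch1')
    #   t2 = t1._reuse_for('sftp://host.example.com/srv/branch2')
    #   # t2 is a new transport and t2._get_connection() is t1._get_connection()
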


# We try to recognize a URL lazily (ignoring user, password, etc)
_urlRE = re.compile(r'^(?P<proto>[^:/\\]+)://(?P<rest>.*)$')

def get_transport(base, possible_transports=None):
    """Open a transport to access a URL or directory.

    :param base: either a URL or a directory name.

    :param possible_transports: optional reusable transports list. If not None,
        created transports will be added to the list.

    :return: A new transport optionally sharing its connection with one of
        possible_transports.
    """
    # TODO: give a better error if base looks like a url but there's no
    # handler for the scheme?
    if base is None:
        base = '.'
    last_err = None

    from bzrlib.directory_service import directories
    base = directories.dereference(base)

    def convert_path_to_url(base, error_str):
        m = _urlRE.match(base)
        if m:
            # This looks like a URL, but we weren't able to
            # instantiate it as such, so raise an appropriate error
            # FIXME: we have a 'error_str' unused and we use last_err below
            raise errors.UnsupportedProtocol(base, last_err)
        # This doesn't look like a protocol, consider it a local path
        new_base = urlutils.local_path_to_url(base)
        return new_base
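
# Example (illustrative): both of the calls below return a Transport instance,
# assuming the relevant transport implementation can be imported:
#
#   t = get_transport('http://example.com/branch')
#   t = get_transport('/srv/repo')   # a local path, converted to a file:// URL
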

class Server(object):
    """A Transport Server."""

    def get_url(self):
        """Return a url for this server."""
        raise NotImplementedError

    def get_bogus_url(self):
        """Return a url for this protocol, that will fail to connect.

        This may raise NotImplementedError to indicate that this server cannot
        provide bogus urls.
        """
        raise NotImplementedError

class TransportTestProviderAdapter(object):
    """A tool to generate a suite testing all transports for a single test.

    This is done by copying the test once for each transport and injecting
    the transport_class and transport_server classes into each copy. Each copy
    is also given a new id() to make it easy to identify.
    """

    def adapt(self, test):
        result = TestSuite()
        for klass, server_factory in self._test_permutations():
            new_test = deepcopy(test)
            new_test.transport_class = klass
            new_test.transport_server = server_factory
            def make_new_test_id():
                new_id = "%s(%s)" % (new_test.id(), server_factory.__name__)
                return lambda: new_id
            new_test.id = make_new_test_id()
            result.addTest(new_test)
        return result

    def get_transport_test_permutations(self, module):
        """Get the permutations module wants to have tested."""
        if getattr(module, 'get_test_permutations', None) is None:
            warning("transport module %s doesn't provide get_test_permutations()"
                    % module.__name__)
            return []
        return module.get_test_permutations()

    def _test_permutations(self):
        """Return a list of the klass, server_factory pairs to test."""
        result = []
        for module in _get_transport_modules():
            try:
                result.extend(self.get_transport_test_permutations(reduce(getattr,
                    (module).split('.')[1:],
                    __import__(module))))
            except errors.DependencyNotPresent, e:
                # Continue even if a dependency prevents us
                # from running this test
                pass
        return result

class TransportLogger(object):
    """Adapt a transport to get clear logging data on api calls.

    Feel free to extend to log whatever calls are of interest.
    """

    def __init__(self, adapted):
        self._adapted = adapted
        self._calls = []

    def get(self, name):
        self._calls.append((name,))
        return self._adapted.get(name)

    def __getattr__(self, name):
        """Thunk all undefined access through to self._adapted."""
        # raise AttributeError, name
        return getattr(self._adapted, name)

    def readv(self, name, offsets):
        self._calls.append((name, offsets))
        return self._adapted.readv(name, offsets)
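
# Illustrative usage sketch: wrap any transport to record the calls made
# against it (the relpath 'foo' is arbitrary):
#
#   t = TransportLogger(get_transport('memory:///'))
#   t.put_bytes('foo', 'bar')        # forwarded via __getattr__, not recorded
#   t.get('foo').read()              # recorded in t._calls as ('foo',)
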

register_transport_proto('file://',
            help="Access using the standard filesystem (default)")
register_lazy_transport('file://', 'bzrlib.transport.local', 'LocalTransport')
transport_list_registry.set_default_transport("file://")

register_transport_proto('sftp://',
            help="Access using SFTP (most SSH servers provide SFTP).",
            register_netloc=True)
register_lazy_transport('sftp://', 'bzrlib.transport.sftp', 'SFTPTransport')

# Decorated http transport
register_transport_proto('http+urllib://',
#                help="Read-only access of branches exported on the web."
            register_netloc=True)
register_lazy_transport('http+urllib://', 'bzrlib.transport.http._urllib',
                        'HttpTransport_urllib')
register_transport_proto('https+urllib://',
#                help="Read-only access of branches exported on the web using SSL."
            register_netloc=True)
register_lazy_transport('https+urllib://', 'bzrlib.transport.http._urllib',
                        'HttpTransport_urllib')
register_transport_proto('http+pycurl://',
#                help="Read-only access of branches exported on the web."
            register_netloc=True)
register_lazy_transport('http+pycurl://', 'bzrlib.transport.http._pycurl',
                        'PyCurlTransport')
register_transport_proto('https+pycurl://',
#                help="Read-only access of branches exported on the web using SSL."
            register_netloc=True)
register_lazy_transport('https+pycurl://', 'bzrlib.transport.http._pycurl',
                        'PyCurlTransport')

# Default http transports (last declared wins (if it can be imported))
register_transport_proto('http://',
            help="Read-only access of branches exported on the web.")
register_transport_proto('https://',
            help="Read-only access of branches exported on the web using SSL.")
register_lazy_transport('http://', 'bzrlib.transport.http._urllib',
                        'HttpTransport_urllib')
register_lazy_transport('https://', 'bzrlib.transport.http._urllib',
                        'HttpTransport_urllib')
register_lazy_transport('http://', 'bzrlib.transport.http._pycurl',
                        'PyCurlTransport')
register_lazy_transport('https://', 'bzrlib.transport.http._pycurl',
                        'PyCurlTransport')

register_transport_proto('ftp://', help="Access using passive FTP.")
register_lazy_transport('ftp://', 'bzrlib.transport.ftp', 'FtpTransport')
register_transport_proto('aftp://', help="Access using active FTP.")
register_lazy_transport('aftp://', 'bzrlib.transport.ftp', 'FtpTransport')

register_transport_proto('memory://')
register_lazy_transport('memory://', 'bzrlib.transport.memory',
                        'MemoryTransport')

# chroots cannot be implicitly accessed, they must be explicitly created:
register_transport_proto('chroot+')

register_transport_proto('readonly+',
#              help="This modifier converts any transport to be readonly."
            )
register_lazy_transport('readonly+', 'bzrlib.transport.readonly',
                        'ReadonlyTransportDecorator')

register_transport_proto('fakenfs+')
register_lazy_transport('fakenfs+', 'bzrlib.transport.fakenfs',
                        'FakeNFSTransportDecorator')

register_transport_proto('trace+')
register_lazy_transport('trace+', 'bzrlib.transport.trace',
                        'TransportTraceDecorator')

register_transport_proto('unlistable+')
register_lazy_transport('unlistable+', 'bzrlib.transport.unlistable',
                        'UnlistableTransportDecorator')

register_transport_proto('brokenrename+')
register_lazy_transport('brokenrename+', 'bzrlib.transport.brokenrename',
                        'BrokenRenameTransportDecorator')

register_transport_proto('vfat+')
register_lazy_transport('vfat+',
                        'bzrlib.transport.fakevfat',
                        'FakeVFATTransportDecorator')

register_transport_proto('nosmart+')
register_lazy_transport('nosmart+', 'bzrlib.transport.nosmart',
                        'NoSmartTransportDecorator')

# These two schemes were registered, but don't seem to have an actual transport
# protocol registered
for scheme in ['ssh', 'bzr+loopback']:
    register_urlparse_netloc_protocol(scheme)

register_transport_proto('bzr://',
            help="Fast access using the Bazaar smart server.",
            register_netloc=True)
register_lazy_transport('bzr://', 'bzrlib.transport.remote',
                        'RemoteTCPTransport')

register_transport_proto('bzr-v2://', register_netloc=True)
register_lazy_transport('bzr-v2://', 'bzrlib.transport.remote',
                        'RemoteTCPTransportV2Only')

register_transport_proto('bzr+http://',
#                help="Fast access using the Bazaar smart server over HTTP."
            register_netloc=True)
register_lazy_transport('bzr+http://', 'bzrlib.transport.remote',
                        'RemoteHTTPTransport')

register_transport_proto('bzr+https://',
#                help="Fast access using the Bazaar smart server over HTTPS."
            register_netloc=True)
register_lazy_transport('bzr+https://',
                        'bzrlib.transport.remote',
                        'RemoteHTTPTransport')

register_transport_proto('bzr+ssh://',
            help="Fast access using the Bazaar smart server over SSH.",
            register_netloc=True)
register_lazy_transport('bzr+ssh://', 'bzrlib.transport.remote',
                        'RemoteSSHTransport')