# Copyright (C) 2005-2011, 2016, 2017 Canonical Ltd
# Copyright (C) 2005 Robey Pointer <robey@lag.net>, Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Implementation of Transport over SFTP, using paramiko."""

from __future__ import absolute_import
# TODO: Remove the transport-based lock_read and lock_write methods. They'll
# then raise TransportNotPossible, which will break remote access to any
# formats which rely on OS-level locks. That should be fine as those formats
# are pretty old, but these combinations may have to be removed from the test
# suite. Those formats all date back to 0.7; so we should be able to remove
# these methods when we officially drop support for those formats.

from ..errors import (FileExists,
from ..osutils import fancy_rename
from ..sixish import (
from ..trace import mutter, warning
from ..transport import (

# Disable one particular warning that comes from paramiko in Python2.5; if
# this is emitted at the wrong time it tends to cause spurious test failures
# or at least noise in the test case::
# [1770/7639 in 86s, 1 known failures, 50 skipped, 2 missing features]
# test_permissions.TestSftpPermissions.test_new_files
# /var/lib/python-support/python2.5/paramiko/message.py:226: DeprecationWarning: integer argument expected, got float
# self.packet.write(struct.pack('>I', n))
warnings.filterwarnings('ignore',
'integer argument expected, got float',
category=DeprecationWarning,
module='paramiko.message')
except ImportError as e:
raise ParamikoNotPresent(e)
from paramiko.sftp import (SFTP_FLAG_WRITE, SFTP_FLAG_CREATE,
SFTP_FLAG_EXCL, SFTP_FLAG_TRUNC,
CMD_HANDLE, CMD_OPEN)
from paramiko.sftp_attr import SFTPAttributes
from paramiko.sftp_file import SFTPFile
# GZ 2017-05-25: Some dark hackery to monkeypatch out issues with paramiko's
# Python 3 compatibility code. Replace broken b() and asbytes() code.
from paramiko.py3compat import b as _bad
from paramiko.common import asbytes as _bad_asbytes

def _b_for_broken_paramiko(s, encoding='utf8'):
"""Hacked b() that does not raise TypeError."""
# https://github.com/paramiko/paramiko/issues/967
if not isinstance(s, bytes):
encode = getattr(s, 'encode', None)
if encode is not None:
return encode(encoding)
# Would like to pass buffer objects along, but have to realise.
tobytes = getattr(s, 'tobytes', None)
if tobytes is not None:

def _asbytes_for_broken_paramiko(s):
"""Hacked asbytes() that does not raise Exception."""
# https://github.com/paramiko/paramiko/issues/968
if not isinstance(s, bytes):
encode = getattr(s, 'encode', None)
if encode is not None:
return encode('utf8')
asbytes = getattr(s, 'asbytes', None)
if asbytes is not None:

_bad.__code__ = _b_for_broken_paramiko.__code__
_bad_asbytes.__code__ = _asbytes_for_broken_paramiko.__code__
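# Note: assigning to __code__ patches the already-imported function objects
# in place, so every module that holds a reference to paramiko's b() or
# asbytes() picks up the fixed behaviour without being re-imported.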
from paramiko.sftp_client import SFTPClient

if 'sftp' not in urlparse.uses_netloc: urlparse.uses_netloc.append('sftp')

if sys.platform == 'win32':
# close_fds not supported on win32

def _get_ssh_vendor():
"""Find out what version of SSH is on the system."""
if _ssh_vendor is not None:
p = subprocess.Popen(['ssh', '-V'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
returncode = p.returncode
stdout, stderr = p.communicate()
if 'OpenSSH' in stderr:
mutter('ssh implementation is OpenSSH')
_ssh_vendor = 'openssh'
elif 'SSH Secure Shell' in stderr:
mutter('ssh implementation is SSH Corp.')
if _ssh_vendor != 'none':
# XXX: 20051123 jamesh
# A check for putty's plink or lsh would go here.
mutter('falling back to paramiko implementation')

"""A socket-like object that talks to an ssh subprocess via pipes."""

def __init__(self, hostname, port=None, user=None):
vendor = _get_ssh_vendor()
assert vendor in ['openssh', 'ssh']
if vendor == 'openssh':
'-oForwardX11=no', '-oForwardAgent=no',
'-oClearAllForwardings=yes', '-oProtocol=2',
'-oNoHostAuthenticationForLocalhost=yes']
args.extend(['-p', str(port)])
args.extend(['-l', user])
args.extend(['-s', hostname, 'sftp'])
elif vendor == 'ssh':
args.extend(['-p', str(port)])
args.extend(['-l', user])
args.extend(['-s', 'sftp', hostname])
self.proc = subprocess.Popen(args, close_fds=_close_fds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)

def send(self, data):
return os.write(self.proc.stdin.fileno(), data)

def recv(self, count):
return os.read(self.proc.stdout.fileno(), count)

self.proc.stdin.close()
self.proc.stdout.close()
# This is a weakref dictionary, so that we can reuse connections
# that are still active. Long term, it might be nice to have some
# sort of expiration policy, such as disconnect if inactive for
# X seconds. But that requires a lot more fanciness.
_connected_hosts = weakref.WeakValueDictionary()

def load_host_keys():
Load system host keys (probably doesn't work on windows) and any
"discovered" keys from previous sessions.
global SYSTEM_HOSTKEYS, BZR_HOSTKEYS
SYSTEM_HOSTKEYS = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
mutter('failed to load system host keys: ' + str(e))
bzr_hostkey_path = os.path.join(config_dir(), 'ssh_host_keys')
BZR_HOSTKEYS = paramiko.util.load_host_keys(bzr_hostkey_path)
mutter('failed to load bzr host keys: ' + str(e))

def save_host_keys():
Save "discovered" host keys in $(config)/ssh_host_keys/.
global SYSTEM_HOSTKEYS, BZR_HOSTKEYS
bzr_hostkey_path = os.path.join(config_dir(), 'ssh_host_keys')
if not os.path.isdir(config_dir()):
os.mkdir(config_dir())
f = open(bzr_hostkey_path, 'w')
f.write('# SSH host keys collected by bzr\n')
for hostname, keys in BZR_HOSTKEYS.iteritems():
for keytype, key in keys.iteritems():
f.write('%s %s %s\n' % (hostname, keytype, key.get_base64()))
mutter('failed to save bzr host keys: ' + str(e))
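# Each saved line has the form '<hostname> <keytype> <base64-key>', e.g.
# (illustrative) 'example.com ssh-rsa AAAAB3Nza...'.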
class SFTPLock(object):
"""This fakes a lock in a remote location.

A present lock is indicated just by the existence of a file. This
doesn't work well on all transports and they are only used in
deprecated storage formats.

__slots__ = ['path', 'lock_path', 'lock_file', 'transport']

def __init__(self, path, transport):
assert isinstance(transport, SFTPTransport)
self.lock_file = None
self.lock_path = path + '.write-lock'
self.transport = transport
self.lock_file = transport._sftp_open_exclusive(self.lock_path)
except FileExists:
raise LockError('File %r already locked' % (self.path,))

"""Should this warn, or actually try to cleanup?"""
warn("SFTPLock %r not explicitly unlocked" % (self.path,))

def unlock(self):
if not self.lock_file:
# What specific errors should we catch here?
class _SFTPReadvHelper(object):
"""A class to help with managing the state of a readv request."""

# See _get_requests for an explanation.
_max_request_size = 32768

def __init__(self, original_offsets, relpath, _report_activity):
"""Create a new readv helper.

:param original_offsets: The original requests given by the caller of
:param relpath: The name of the file (if known)
:param _report_activity: A Transport._report_activity bound method,
to be called as data arrives.
self.original_offsets = list(original_offsets)
self.relpath = relpath
self._report_activity = _report_activity

def _get_requests(self):
"""Break up the offsets into individual requests over sftp.

The SFTP spec only requires implementers to support 32kB requests. We
could try something larger (openssh supports 64kB), but then we have to
handle requests that fail.
So instead, we just break up our maximum chunks into 32kB chunks, and
asynchronously request them.
Newer versions of paramiko would do the chunking for us, but we want to
start processing results right away, so we do it ourselves.
# TODO: Because we issue async requests, we don't 'fudge' any extra
# data. I'm not 100% sure that is the best choice.

# The first thing we do is to collapse the individual requests as much
# as possible, so we don't issue requests <32kB
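# For example (illustrative numbers): offsets [(0, 1000), (1000, 2000),
# (50000, 40000)] coalesce into the ranges (0, 3000) and (50000, 40000);
# the first fits in a single request, while the second is split into a
# 32768-byte request followed by a 7232-byte request.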
194
sorted_offsets = sorted(self.original_offsets)
195
coalesced = list(ConnectedTransport._coalesce_offsets(sorted_offsets,
196
limit=0, fudge_factor=0))
198
for c_offset in coalesced:
199
start = c_offset.start
200
size = c_offset.length
202
# Break this up into 32kB requests
204
next_size = min(size, self._max_request_size)
205
requests.append((start, next_size))
208
if 'sftp' in debug.debug_flags:
209
mutter('SFTP.readv(%s) %s offsets => %s coalesced => %s requests',
210
self.relpath, len(sorted_offsets), len(coalesced),
214
def request_and_yield_offsets(self, fp):
215
"""Request the data from the remote machine, yielding the results.
217
:param fp: A Paramiko SFTPFile object that supports readv.
218
:return: Yield the data requested by the original readv caller, one by
221
requests = self._get_requests()
222
offset_iter = iter(self.original_offsets)
223
cur_offset, cur_size = next(offset_iter)
224
# paramiko .readv() yields strings that are in the order of the requests
225
# So we track the current request to know where the next data is
226
# being returned from.
232
# This is used to buffer chunks which we couldn't process yet
233
# It is (start, end, data) tuples.
235
# Create an 'unlimited' data stream, so we stop based on requests,
236
# rather than just because the data stream ended. This lets us detect
238
data_stream = itertools.chain(fp.readv(requests),
239
itertools.repeat(None))
240
for (start, length), data in zip(requests, data_stream):
242
if cur_coalesced is not None:
243
raise errors.ShortReadvError(self.relpath,
244
start, length, len(data))
245
if len(data) != length:
246
raise errors.ShortReadvError(self.relpath,
247
start, length, len(data))
248
self._report_activity(length, 'read')
250
# This is the first request, just buffer it
251
buffered_data = [data]
252
buffered_len = length
254
elif start == last_end:
255
# The data we are reading fits neatly on the previous
256
# buffer, so this is all part of a larger coalesced range.
257
buffered_data.append(data)
258
buffered_len += length
class SFTPTransport (Transport):
Transport implementation for SFTP access.

_do_prefetch = False # Right now Paramiko's prefetch support causes things to hang

def __init__(self, base, clone_from=None):
assert base.startswith('sftp://')
self._parse_url(base)
base = self._unparse_url()
super(SFTPTransport, self).__init__(base)
if clone_from is None:
# use the same ssh connection, etc
self._sftp = clone_from._sftp
# super saves 'self.base'

def should_cache(self):
Return True if the data pulled across should be cached locally.

def clone(self, offset=None):
Return a new SFTPTransport with root at self.base + offset.
We share the same SFTP session between such transports, because it's
fairly expensive to set them up.
return SFTPTransport(self.base, self)
return SFTPTransport(self.abspath(offset), self)

def abspath(self, relpath):
Return the full url to the given relative path.
@param relpath: the relative path or path components
@type relpath: str or list
return self._unparse_url(self._abspath(relpath))

def _abspath(self, relpath):
"""Return the absolute path segment without the SFTP URL."""
# FIXME: share the common code across transports
assert isinstance(relpath, basestring)
relpath = [urllib.unquote(relpath)]
basepath = self._path.split('/')
if len(basepath) > 0 and basepath[-1] == '':
basepath = basepath[:-1]
if len(basepath) == 0:
# In most filesystems, a request for the parent
# of root, just returns root.
# We have an 'interrupt' in the data stream. So we know we are
# at a request boundary.
# We haven't consumed the buffer so far, so put it into
# data_chunks, and continue.
buffered = b''.join(buffered_data)
data_chunks.append((input_start, buffered))
buffered_data = [data]
buffered_len = length
last_end = start + length
if input_start == cur_offset and cur_size <= buffered_len:
# Simplify the next steps a bit by transforming buffered_data
# into a single string. We also have the nice property that
# when there is only one string ''.join([x]) == x, so there is
buffered = b''.join(buffered_data)
# Clean out buffered data so that we keep memory
# TODO: We *could* also consider the case where cur_offset is in
# the buffered range, even though it doesn't *start*
# the buffered range. But for packs we pretty much always
# read in order, so you won't get any extra data in the
while (input_start == cur_offset
and (buffered_offset + cur_size) <= buffered_len):
# We've buffered enough data to process this request, spit it
cur_data = buffered[buffered_offset:buffered_offset + cur_size]
# move the direct pointer into our buffered data
buffered_offset += cur_size
# Move the start-of-buffer pointer
input_start += cur_size
# Yield the requested data
yield cur_offset, cur_data
cur_offset, cur_size = next(offset_iter)
except StopIteration:
# at this point, we've consumed as much of buffered as we can,
# so break off the portion that we consumed
if buffered_offset == len(buffered_data):
# No tail to leave behind
buffered = buffered[buffered_offset:]
buffered_data = [buffered]
buffered_len = len(buffered)
# now that the data stream is done, close the handle
buffered = b''.join(buffered_data)
data_chunks.append((input_start, buffered))
if 'sftp' in debug.debug_flags:
mutter('SFTP readv left with %d out-of-order bytes',
sum(len(x[1]) for x in data_chunks))
# We've processed all the readv data, at this point, anything we
# couldn't process is in data_chunks. This doesn't happen often, so
# this code path isn't optimized
# We use an interesting process for data_chunks
# Specifically if we have "bisect_left([(start, len, entries)],
# If start == qstart, then we get the specific node. Otherwise we
# get the previous node
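# For example (illustrative): with data_chunks = [(0, b'...'), (4096, b'...')]
# and cur_offset = 5000, bisect_left(data_chunks, (5000,)) returns 2, so we
# step back one entry and slice into the chunk that starts at 4096; with
# cur_offset = 4096 we hit the entry at index 1 exactly.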
idx = bisect.bisect_left(data_chunks, (cur_offset,))
if idx < len(data_chunks) and data_chunks[idx][0] == cur_offset:
# The data starts here
data = data_chunks[idx][1][:cur_size]
# The data is in a portion of a previous page
sub_offset = cur_offset - data_chunks[idx][0]
data = data_chunks[idx][1]
data = data[sub_offset:sub_offset + cur_size]
# We are missing the page where the data should be found,
if len(data) != cur_size:
raise AssertionError('We must have miscalculated.'
' We expected %d bytes, but only found %d'
% (cur_size, len(data)))
yield cur_offset, data
cur_offset, cur_size = next(offset_iter)
except StopIteration:
class SFTPTransport(ConnectedTransport):
"""Transport implementation for SFTP access."""

# TODO: jam 20060717 Conceivably these could be configurable, either
# by auto-tuning at run-time, or by a configuration (per host??)
# but the performance curve is pretty flat, so just going with
# reasonable defaults.
_max_readv_combine = 200
# Having to round trip to the server means waiting for a response,
# so it is better to download extra bytes.
# 8KiB had good performance for both local and remote network operations
_bytes_to_read_before_seek = 8192
# The sftp spec says that implementations SHOULD allow reads
# to be at least 32K. paramiko.readv() does an async request
# for the chunks. So we need to keep it within a single request
# size for paramiko <= 1.6.1. paramiko 1.6.2 will probably chop
# up the request itself, rather than us having to worry about it
_max_request_size = 32768

def _remote_path(self, relpath):
"""Return the path to be passed along the sftp protocol for relpath.

:param relpath: is a urlencoded string.
remote_path = self._parsed_url.clone(relpath).path
# the initial slash should be removed from the path, and treated as a
# homedir relative path (the path begins with a double slash if it is
# absolute). see draft-ietf-secsh-scp-sftp-ssh-uri-03.txt
# RBC 20060118 we are not using this as its too user hostile. instead
# we are following lftp and using /~/foo to mean '~/foo'
# vila--20070602 and leave absolute paths begin with a single slash.
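# For example (illustrative): a URL path of '/~/repo' maps to the
# homedir-relative remote path 'repo', while '/srv/repo' is passed
# through unchanged as an absolute path.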
if remote_path.startswith('/~/'):
remote_path = remote_path[3:]
elif remote_path == '/~':

def _create_connection(self, credentials=None):
"""Create a new connection with the provided credentials.

:param credentials: The credentials needed to establish the connection.
:return: The created connection and its associated credentials.

The credentials are only the password as it may have been entered
interactively by the user and may be different from the one provided
in base url at transport creation time.
if credentials is None:
password = self._parsed_url.password
password = credentials
vendor = ssh._get_ssh_vendor()
user = self._parsed_url.user
auth = config.AuthenticationConfig()
user = auth.get_user('ssh', self._parsed_url.host,
self._parsed_url.port)
connection = vendor.connect_sftp(self._parsed_url.user, password,
self._parsed_url.host, self._parsed_url.port)
return connection, (user, password)

def disconnect(self):
connection = self._get_connection()
if connection is not None:

"""Ensures that a connection is established"""
connection = self._get_connection()
if connection is None:
# First connection ever
connection, credentials = self._create_connection()
self._set_connection(connection, credentials)
path = '/'.join(basepath)
# could still be a "relative" path here, but relative on the sftp server

def relpath(self, abspath):
username, password, host, port, path = self._split_url(abspath)
if (username != self._username):
error.append('username mismatch')
if (host != self._host):
error.append('host mismatch')
if (port != self._port):
error.append('port mismatch')
if (not path.startswith(self._path)):
error.append('path mismatch')
extra = ': ' + ', '.join(error)
raise PathNotChild(abspath, self.base, extra=extra)
return path[pl:].lstrip('/')
def has(self, relpath):
Does the target location exist?
self._get_sftp().stat(self._remote_path(relpath))
# stat result is about 20 bytes, let's say
self._report_activity(20, 'read')

def get(self, relpath):
"""Get the file at the given relative path.

:param relpath: The relative path to the file
path = self._remote_path(relpath)
f = self._get_sftp().file(path, mode='rb')
size = f.stat().st_size
if getattr(f, 'prefetch', None) is not None:
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, path, ': error retrieving',
failure_exc=errors.ReadError)

def get_bytes(self, relpath):
# reimplement this here so that we can report how many bytes came back
with self.get(relpath) as f:
self._report_activity(len(bytes), 'read')
def _readv(self, relpath, offsets):
"""See Transport.readv()"""
# We overload the default readv() because we want to use a file
# that does not have prefetch enabled.
# Also, if we have a new paramiko, it implements an async readv()
path = self._remote_path(relpath)
fp = self._get_sftp().file(path, mode='rb')
readv = getattr(fp, 'readv', None)
return self._sftp_readv(fp, offsets, relpath)
if 'sftp' in debug.debug_flags:
mutter('seek and read %s offsets', len(offsets))
return self._seek_and_read(fp, offsets, relpath)
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, path, ': error retrieving')

def recommended_page_size(self):
"""See Transport.recommended_page_size().

For SFTP we suggest a large page size to reduce the overhead
introduced by latency.

def _sftp_readv(self, fp, offsets, relpath):
"""Use the readv() member of fp to do async readv.

Then read them using paramiko.readv(). paramiko.readv()
does not support ranges > 64K, so it caps the request size, and
just reads until it gets all the stuff it wants.
helper = _SFTPReadvHelper(offsets, relpath, self._report_activity)
return helper.request_and_yield_offsets(fp)
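# For example (illustrative): readv offsets [(0, 10), (4096, 100)] yield
# (0, <10 bytes>) then (4096, <100 bytes>) in the caller's original order,
# regardless of how the underlying 32kB requests were issued.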
def put_file(self, relpath, f, mode=None):
Copy the file-like object into the location.

def get_partial(self, relpath, start, length=None):
Get just part of a file.

:param relpath: Path to the file, relative to base
:param start: The starting position to read from
:param length: The length to read. A length of None indicates
read to the end of the file.
:return: A file-like object containing at least the specified bytes.
Some implementations may return objects which can be read
past this length, but this is not guaranteed.
# TODO: implement get_partial_multi to help with knit support
f = self.get(relpath)
if self._do_prefetch and hasattr(f, 'prefetch'):

def put(self, relpath, f):
Copy the file-like or string object into the location.

:param relpath: Location to put the contents, relative to base.
:param f: File-like object.
:param mode: The final mode for the file
final_path = self._remote_path(relpath)
return self._put(final_path, f, mode=mode)
final_path = self._abspath(relpath)
tmp_relpath = '%s.tmp.%.9f.%d.%d' % (relpath, time.time(),
os.getpid(), random.randint(0,0x7FFFFFFF))
tmp_abspath = self._abspath(tmp_relpath)
fout = self._sftp_open_exclusive(tmp_relpath)
def _put(self, abspath, f, mode=None):
"""Helper function so both put() and copy_abspaths can reuse the code"""
tmp_abspath = '%s.tmp.%.9f.%d.%d' % (abspath, time.time(),
os.getpid(), random.randint(0, 0x7FFFFFFF))
fout = self._sftp_open_exclusive(tmp_abspath, mode=mode)
fout.set_pipelined(True)
length = self._pump(f, fout)
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, tmp_abspath)
# XXX: This doesn't truly help like we would like it to.
# The problem is that openssh strips sticky bits. So while we
# can properly set group write permission, we lose the group
# sticky bit. So it is probably best to stop chmodding, and
# just tell users that they need to set the umask correctly.
# The attr.st_mode = mode, in _sftp_open_exclusive
# will handle when the user wants the final mode to be more
# restrictive. And then we avoid a round trip. Unless
# paramiko decides to expose an async chmod()
# This is designed to chmod() right before we close.
# Because we set_pipelined() earlier, theoretically we might
# avoid the round trip for fout.close()
self._get_sftp().chmod(tmp_abspath, mode)
self._rename_and_overwrite(tmp_abspath, abspath)
except Exception as e:
# If we fail, try to clean up the temporary file
# before we throw the exception
# but don't let another exception mess things up
# Write out the traceback, because otherwise
# the catch and throw destroys it
mutter(traceback.format_exc())
self._get_sftp().remove(tmp_abspath)
# raise the original with its traceback if we can.
def _put_non_atomic_helper(self, relpath, writer, mode=None,
create_parent_dir=False,
abspath = self._remote_path(relpath)
# TODO: jam 20060816 paramiko doesn't publicly expose a way to
# set the file mode at create time. If it does, use it.
# But for now, we just chmod later anyway.

def _open_and_write_file():
"""Try to open the target file, raise error on failure"""
self._sftp.remove(tmp_abspath)
# sftp rename doesn't allow overwriting, so play tricks:
tmp_safety = 'bzr.tmp.%.9f.%d.%d' % (time.time(), os.getpid(), random.randint(0, 0x7FFFFFFF))
tmp_safety = self._abspath(tmp_safety)
self._sftp.rename(final_path, tmp_safety)
fout = self._get_sftp().file(abspath, mode='wb')
fout.set_pipelined(True)
except (paramiko.SSHException, IOError) as e:
self._translate_io_exception(e, abspath,
# This is designed to chmod() right before we close.
# Because we set_pipelined() earlier, theoretically we might
# avoid the round trip for fout.close()
self._get_sftp().chmod(abspath, mode)
self._sftp.rename(tmp_abspath, final_path)
except (paramiko.SSHException, IOError) as e:
self._translate_io_exception(e, relpath, ': unable to rename')
if not create_parent_dir:
_open_and_write_file()
# Try error handling to create the parent directory if we need to
_open_and_write_file()
# Try to create the parent directory, and then go back to
parent_dir = os.path.dirname(abspath)
self._mkdir(parent_dir, dir_mode)
_open_and_write_file()
def put_file_non_atomic(self, relpath, f, mode=None,
create_parent_dir=False,
"""Copy the file-like object into the target location.

This function is not strictly safe to use. It is only meant to
be used when you already know that the target does not exist.
It is not safe, because it will open and truncate the remote
file. So there may be a time when the file has invalid contents.

:param relpath: The remote location to put the contents.
:param f: File-like object.
:param mode: Possible access permissions for new file.
None means do not set remote permissions.
:param create_parent_dir: If we cannot create the target file because
the parent directory does not exist, go ahead and
create it, and then try again.
self._put_non_atomic_helper(relpath, writer, mode=mode,
create_parent_dir=create_parent_dir,

def put_bytes_non_atomic(self, relpath, raw_bytes, mode=None,
create_parent_dir=False,
if not isinstance(raw_bytes, bytes):
'raw_bytes must be a plain string, not %s' % type(raw_bytes))
fout.write(raw_bytes)
self._put_non_atomic_helper(relpath, writer, mode=mode,
create_parent_dir=create_parent_dir,
self._sftp.unlink(tmp_safety)
self._sftp.rename(tmp_safety, final_path)

def iter_files_recursive(self):
"""Walk the relative paths of all files in this transport."""
# progress is handled by list_dir
queue = list(self.list_dir('.'))
relpath = queue.pop(0)
st = self.stat(relpath)
if stat.S_ISDIR(st.st_mode):
for i, basename in enumerate(self.list_dir(relpath)):
queue.insert(i, relpath + '/' + basename)
def _mkdir(self, abspath, mode=None):
self._report_activity(len(abspath), 'write')
self._get_sftp().mkdir(abspath, local_mode)
self._report_activity(1, 'read')
# chmod a dir through sftp will erase any sgid bit set
# on the server side. So, if the bit mode are already
# set, avoid the chmod. If the mode is not fine but
# the sgid bit is set, report a warning to the user
# with the umask fix.
stat = self._get_sftp().lstat(abspath)
mode = mode & 0o777 # can't set special bits anyway
if mode != stat.st_mode & 0o777:
if stat.st_mode & 0o6000:
warning('About to chmod %s over sftp, which will result'
' in its suid or sgid bits being cleared. If'
' you want to preserve those bits, change your '
' environment on the server to use umask 0%03o.'
% (abspath, 0o777 - mode))
self._get_sftp().chmod(abspath, mode=mode)
except (paramiko.SSHException, IOError) as e:
self._translate_io_exception(e, abspath, ': unable to mkdir',
failure_exc=FileExists)

def mkdir(self, relpath, mode=None):
"""Create a directory at the given path."""
self._mkdir(self._remote_path(relpath), mode=mode)

def open_write_stream(self, relpath, mode=None):
"""See Transport.open_write_stream."""
# initialise the file to zero-length
# this is three round trips, but we don't use this
# api more than once per write_group at the moment so
# it is a tolerable overhead. Better would be to truncate
# the file after opening. RBC 20070805
self.put_bytes_non_atomic(relpath, b"", mode)
abspath = self._remote_path(relpath)
# TODO: jam 20060816 paramiko doesn't publicly expose a way to
# set the file mode at create time. If it does, use it.
# But for now, we just chmod later anyway.
handle = self._get_sftp().file(abspath, mode='wb')
handle.set_pipelined(True)
except (paramiko.SSHException, IOError) as e:
self._translate_io_exception(e, abspath,
_file_streams[self.abspath(relpath)] = handle
return FileFileStream(self, relpath, handle)
def _translate_io_exception(self, e, path, more_info='',
failure_exc=PathError):
"""Translate a paramiko or IOError into a friendlier exception.

:param e: The original exception
:param failure_exc: Paramiko has the super fun ability to raise completely
opaque errors that just set "e.args = ('Failure',)" with
no more information.
If this parameter is set, it defines the exception
to raise in these cases.
# paramiko seems to generate detailless errors.
self._translate_error(e, path, raise_generic=False)
if getattr(e, 'args', None) is not None:
if (e.args == ('No such file or directory',) or
e.args == ('No such file',)):
raise NoSuchFile(path, str(e) + more_info)
if (e.args == ('mkdir failed',) or
e.args[0].startswith('syserr: File exists')):
raise FileExists(path, str(e) + more_info)
# strange but true, for the paramiko server.
if (e.args == ('Failure',)):
raise failure_exc(path, str(e) + more_info)
# Can be something like args = ('Directory not empty:
# '/srv/bazaar.launchpad.net/blah...: '
# [Errno 39] Directory not empty',)
if (e.args[0].startswith('Directory not empty: ')
or getattr(e, 'errno', None) == errno.ENOTEMPTY):
raise errors.DirectoryNotEmpty(path, str(e))
if e.args == ('Operation unsupported',):
raise errors.TransportNotPossible()
mutter('Raising exception with args %s', e.args)
if getattr(e, 'errno', None) is not None:
mutter('Raising exception with errno %s', e.errno)
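# For example: a mkdir of an existing directory can come back from paramiko
# as a bare IOError('Failure'); _mkdir passes failure_exc=FileExists, so the
# caller sees FileExists for that path instead of the opaque error.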
def append_file(self, relpath, f, mode=None):
Append the text in the file-like object into the final
path = self._remote_path(relpath)
fout = self._get_sftp().file(path, 'ab')
self._get_sftp().chmod(path, mode)
self._pump(f, fout)
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, relpath, ': unable to append')
def rename(self, rel_from, rel_to):
"""Rename without special overwriting"""
self._get_sftp().rename(self._remote_path(rel_from),
self._remote_path(rel_to))
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, rel_from,
': unable to rename to %r' % (rel_to))

def _rename_and_overwrite(self, abs_from, abs_to):
"""Do a fancy rename on the remote server.

Using the implementation provided by osutils.
sftp = self._get_sftp()
fancy_rename(abs_from, abs_to,
rename_func=sftp.rename,
unlink_func=sftp.remove)
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, abs_from,
': unable to rename to %r' % (abs_to))
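# fancy_rename works around SFTP servers that refuse to rename over an
# existing file: roughly, it moves abs_to aside to a temporary name, renames
# abs_from into place, then removes the temporary file (the same trick the
# older put() code above performs by hand with tmp_safety).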
def copy(self, rel_from, rel_to):
"""Copy the item at rel_from to the location at rel_to"""
path_from = self._abspath(rel_from)
path_to = self._abspath(rel_to)
self._copy_abspaths(path_from, path_to)

def _copy_abspaths(self, path_from, path_to):
"""Copy files given an absolute path

:param path_from: Path on remote server to read
:param path_to: Path on remote server to write
TODO: Should the destination location be atomically created?
This has not been specified
TODO: This should use some sort of remote copy, rather than
pulling the data locally, and then writing it remotely
fin = self._sftp.file(path_from, 'rb')
fout = self._sftp.file(path_to, 'wb')
fout.set_pipelined(True)
self._pump(fin, fout)
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, path_from, ': unable to copy to: %r' % path_to)

def copy_to(self, relpaths, other, pb=None):
"""Copy a set of entries from self into another Transport.

:param relpaths: A list/generator of entries to be copied.
if isinstance(other, SFTPTransport) and other._sftp is self._sftp:
# Both from & to are on the same remote filesystem
# We can use a remote copy, instead of pulling locally, and pushing
total = self._get_total(relpaths)
for path in relpaths:
path_from = self._abspath(relpath)
path_to = other._abspath(relpath)
self._update_pb(pb, 'copy-to', count, total)
self._copy_abspaths(path_from, path_to)
return super(SFTPTransport, self).copy_to(relpaths, other, pb=pb)
# The dummy implementation just does a simple get + put

def copy_entry(path):
other.put(path, self.get(path))

return self._iterate_over(relpaths, copy_entry, pb, 'copy_to', expand=False)
def move(self, rel_from, rel_to):
"""Move the item at rel_from to the location at rel_to"""
path_from = self._remote_path(rel_from)
path_to = self._remote_path(rel_to)
self._rename_and_overwrite(path_from, path_to)

def delete(self, relpath):
"""Delete the item at relpath"""
path = self._remote_path(relpath)
self._get_sftp().remove(path)
except (IOError, paramiko.SSHException) as e:
self._translate_io_exception(e, path, ': unable to delete')

def external_url(self):
"""See breezy.transport.Transport.external_url."""
# the external path for SFTP is the base

def listable(self):
"""Return True if this store supports listing."""
# that we have taken the lock.
return SFTPLock(relpath, self)
def _sftp_open_exclusive(self, abspath, mode=None):

def _unparse_url(self, path=None):
path = urllib.quote(path)
if path.startswith('/'):
path = '/%2F' + path[1:]
netloc = urllib.quote(self._host)
if self._username is not None:
netloc = '%s@%s' % (urllib.quote(self._username), netloc)
if self._port is not None:
netloc = '%s:%d' % (netloc, self._port)
return urlparse.urlunparse(('sftp', netloc, path, '', '', ''))
def _split_url(self, url):
if isinstance(url, unicode):
url = url.encode('utf-8')
(scheme, netloc, path, params,
query, fragment) = urlparse.urlparse(url, allow_fragments=False)
assert scheme == 'sftp'
username = password = host = port = None
username, host = netloc.split('@', 1)
username, password = username.split(':', 1)
password = urllib.unquote(password)
username = urllib.unquote(username)
host, port = host.rsplit(':', 1)
# TODO: Should this be ConnectionError?
raise TransportError('%s: invalid port number' % port)
host = urllib.unquote(host)
path = urllib.unquote(path)
# the initial slash should be removed from the path, and treated
# as a homedir relative path (the path begins with a double slash
# if it is absolute).
# see draft-ietf-secsh-scp-sftp-ssh-uri-03.txt
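# For example (illustrative): 'sftp://host/repo' yields the homedir-relative
# path 'repo', while 'sftp://host/%2Fsrv/repo' unquotes to a double slash and
# yields the absolute path '/srv/repo'.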
629
if path.startswith('/'):
632
return (username, password, host, port, path)
634
def _parse_url(self, url):
635
(self._username, self._password,
636
self._host, self._port, self._path) = self._split_url(url)
def _sftp_connect(self):
"""Connect to the remote sftp server.
After this, self._sftp should have a valid connection (or
we raise a TransportError 'could not connect').

TODO: Raise a more reasonable ConnectionFailed exception
global _connected_hosts
idx = (self._host, self._port, self._username)
self._sftp = _connected_hosts[idx]
vendor = _get_ssh_vendor()
sock = SFTPSubprocess(self._host, self._port, self._username)
self._sftp = SFTPClient(sock)
self._paramiko_connect()
_connected_hosts[idx] = self._sftp
def _paramiko_connect(self):
global SYSTEM_HOSTKEYS, BZR_HOSTKEYS
t = paramiko.Transport((self._host, self._port or 22))
except paramiko.SSHException as e:
raise ConnectionError('Unable to reach SSH host %s:%d' %
(self._host, self._port), e)
server_key = t.get_remote_server_key()
server_key_hex = paramiko.util.hexify(server_key.get_fingerprint())
keytype = server_key.get_name()
if self._host in SYSTEM_HOSTKEYS and keytype in SYSTEM_HOSTKEYS[self._host]:
our_server_key = SYSTEM_HOSTKEYS[self._host][keytype]
our_server_key_hex = paramiko.util.hexify(our_server_key.get_fingerprint())
elif self._host in BZR_HOSTKEYS and keytype in BZR_HOSTKEYS[self._host]:
our_server_key = BZR_HOSTKEYS[self._host][keytype]
our_server_key_hex = paramiko.util.hexify(our_server_key.get_fingerprint())
warning('Adding %s host key for %s: %s' % (keytype, self._host, server_key_hex))
if self._host not in BZR_HOSTKEYS:
BZR_HOSTKEYS[self._host] = {}
BZR_HOSTKEYS[self._host][keytype] = server_key
our_server_key = server_key
our_server_key_hex = paramiko.util.hexify(our_server_key.get_fingerprint())
if server_key != our_server_key:
filename1 = os.path.expanduser('~/.ssh/known_hosts')
filename2 = os.path.join(config_dir(), 'ssh_host_keys')
raise TransportError('Host keys for %s do not match! %s != %s' % \
(self._host, our_server_key_hex, server_key_hex),
['Try editing %s or %s' % (filename1, filename2)])
self._sftp = t.open_sftp_client()
except paramiko.SSHException as e:
raise ConnectionError('Unable to start sftp client %s:%d' %
(self._host, self._port), e)
def _sftp_auth(self, transport):
# paramiko requires a username, but it might be none if nothing was supplied
# use the local username, just in case.
# We don't override self._username, because if we aren't using paramiko,
# the username might be specified in ~/.ssh/config and we don't want to
# force it to something else
# Also, it would mess up the self.relpath() functionality
username = self._username or getpass.getuser()
# Paramiko tries to open a socket.AF_UNIX in order to connect
# to ssh-agent. That attribute doesn't exist on win32 (it does in cygwin)
# so we get an AttributeError exception. For now, just don't try to
# connect to an agent if we are on win32
if sys.platform != 'win32':
agent = paramiko.Agent()
for key in agent.get_keys():
mutter('Trying SSH agent key %s' % paramiko.util.hexify(key.get_fingerprint()))
transport.auth_publickey(username, key)
except paramiko.SSHException as e:
# okay, try finding id_rsa or id_dss? (posix only)
if self._try_pkey_auth(transport, paramiko.RSAKey, username, 'id_rsa'):
if self._try_pkey_auth(transport, paramiko.DSSKey, username, 'id_dsa'):
transport.auth_password(username, self._password)
except paramiko.SSHException as e:
# FIXME: Don't keep a password held in memory if you can help it
#self._password = None
# give up and ask for a password
password = bzrlib.ui.ui_factory.get_password(
prompt='SSH %(user)s@%(host)s password',
user=username, host=self._host)
transport.auth_password(username, password)
except paramiko.SSHException as e:
raise ConnectionError('Unable to authenticate to SSH host as %s@%s' %
(username, self._host), e)
def _try_pkey_auth(self, transport, pkey_class, username, filename):
filename = os.path.expanduser('~/.ssh/' + filename)
key = pkey_class.from_private_key_file(filename)
transport.auth_publickey(username, key)
except paramiko.PasswordRequiredException:
password = bzrlib.ui.ui_factory.get_password(
prompt='SSH %(filename)s password',
key = pkey_class.from_private_key_file(filename, password)
transport.auth_publickey(username, key)
except paramiko.SSHException:
mutter('SSH authentication via %s key failed.' % (os.path.basename(filename),))
except paramiko.SSHException:
mutter('SSH authentication via %s key failed.' % (os.path.basename(filename),))

def _sftp_open_exclusive(self, relpath):
"""Open a remote path exclusively.

SFTP supports O_EXCL (SFTP_FLAG_EXCL), which fails if