# Copyright (C) 2005-2011, 2016, 2017 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Implementation of Transport over SFTP, using paramiko."""

from __future__ import absolute_import

# TODO: Remove the transport-based lock_read and lock_write methods. They'll
# then raise TransportNotPossible, which will break remote access to any
# formats which rely on OS-level locks. That should be fine as those formats
# are pretty old, but these combinations may have to be removed from the test
# suite. Those formats all date back to 0.7; so we should be able to remove
# these methods when we officially drop support for those formats.
from ..errors import (FileExists,
from ..osutils import fancy_rename
from ..sixish import (
from ..trace import mutter, warning
from ..transport import (
# Disable one particular warning that comes from paramiko in Python2.5; if
# this is emitted at the wrong time it tends to cause spurious test failures
# or at least noise in the test case::
#
#   [1770/7639 in 86s, 1 known failures, 50 skipped, 2 missing features]
#   test_permissions.TestSftpPermissions.test_new_files
#   /var/lib/python-support/python2.5/paramiko/message.py:226: DeprecationWarning: integer argument expected, got float
#     self.packet.write(struct.pack('>I', n))
warnings.filterwarnings('ignore',
                        'integer argument expected, got float',
                        category=DeprecationWarning,
                        module='paramiko.message')
except ImportError as e:
    raise ParamikoNotPresent(e)

from paramiko.sftp import (SFTP_FLAG_WRITE, SFTP_FLAG_CREATE,
                           SFTP_FLAG_EXCL, SFTP_FLAG_TRUNC,
                           SFTP_OK, CMD_HANDLE, CMD_OPEN)
from paramiko.sftp_attr import SFTPAttributes
from paramiko.sftp_file import SFTPFile

# GZ 2017-05-25: Some dark hackery to monkeypatch out issues with paramiko's
# Python 3 compatibility code. Replace broken b() and asbytes() code.
from paramiko.py3compat import b as _bad
from paramiko.common import asbytes as _bad_asbytes
def _b_for_broken_paramiko(s, encoding='utf8'):
    """Hacked b() that does not raise TypeError."""
    # https://github.com/paramiko/paramiko/issues/967
    if not isinstance(s, bytes):
        encode = getattr(s, 'encode', None)
        if encode is not None:
            return encode(encoding)
        # Would like to pass buffer objects along, but have to realise.
        tobytes = getattr(s, 'tobytes', None)
        if tobytes is not None:


def _asbytes_for_broken_paramiko(s):
    """Hacked asbytes() that does not raise Exception."""
    # https://github.com/paramiko/paramiko/issues/968
    if not isinstance(s, bytes):
        encode = getattr(s, 'encode', None)
        if encode is not None:
            return encode('utf8')
        asbytes = getattr(s, 'asbytes', None)
        if asbytes is not None:


_bad.func_code = _b_for_broken_paramiko.func_code
_bad_asbytes.func_code = _asbytes_for_broken_paramiko.func_code
class SFTPLock(object):
    """This fakes a lock in a remote location.

    A present lock is indicated just by the existence of a file. This
    doesn't work well on all transports and they are only used in
    deprecated storage formats.

    __slots__ = ['path', 'lock_path', 'lock_file', 'transport']

    def __init__(self, path, transport):
        self.lock_file = None
        self.lock_path = path + '.write-lock'
        self.transport = transport
        # RBC 20060103 FIXME should we be using private methods here ?
        abspath = transport._remote_path(self.lock_path)
        self.lock_file = transport._sftp_open_exclusive(abspath)
            raise LockError('File %r already locked' % (self.path,))

        if not self.lock_file:
        self.lock_file.close()
        self.lock_file = None
            self.transport.delete(self.lock_path)
        except (NoSuchFile,):
            # What specific errors should we catch here?
class _SFTPReadvHelper(object):
    """A class to help with managing the state of a readv request."""

    # See _get_requests for an explanation.
    _max_request_size = 32768

    def __init__(self, original_offsets, relpath, _report_activity):
        """Create a new readv helper.

        :param original_offsets: The original requests given by the caller of
        :param relpath: The name of the file (if known)
        :param _report_activity: A Transport._report_activity bound method,
            to be called as data arrives.
        self.original_offsets = list(original_offsets)
        self.relpath = relpath
        self._report_activity = _report_activity

    def _get_requests(self):
        """Break up the offsets into individual requests over sftp.

        The SFTP spec only requires implementers to support 32kB requests. We
        could try something larger (openssh supports 64kB), but then we have to
        handle requests that fail.
        So instead, we just break up our maximum chunks into 32kB chunks, and
        asynchronously request them.
        Newer versions of paramiko would do the chunking for us, but we want to
        start processing results right away, so we do it ourselves.
        # TODO: Because we issue async requests, we don't 'fudge' any extra
        #       data. I'm not 100% sure that is the best choice.

        # The first thing we do is to collapse the individual requests as much
        # as possible, so we don't issue requests <32kB
        sorted_offsets = sorted(self.original_offsets)
        coalesced = list(ConnectedTransport._coalesce_offsets(sorted_offsets,
                                                              limit=0, fudge_factor=0))
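        # For example (illustrative values only): original_offsets of
        # [(0, 40000), (40000, 5000)] coalesce into a single 45000-byte range
        # starting at offset 0, which the loop below then splits into requests
        # of at most _max_request_size bytes, i.e. (0, 32768) and (32768, 12232).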
        for c_offset in coalesced:
            start = c_offset.start
            size = c_offset.length

            # Break this up into 32kB requests
                next_size = min(size, self._max_request_size)
                requests.append((start, next_size))
        if 'sftp' in debug.debug_flags:
            mutter('SFTP.readv(%s) %s offsets => %s coalesced => %s requests',
                   self.relpath, len(sorted_offsets), len(coalesced),
    def request_and_yield_offsets(self, fp):
        """Request the data from the remote machine, yielding the results.

        :param fp: A Paramiko SFTPFile object that supports readv.
        :return: Yield the data requested by the original readv caller, one by
        requests = self._get_requests()
        offset_iter = iter(self.original_offsets)
        cur_offset, cur_size = next(offset_iter)
        # paramiko .readv() yields strings that are in the order of the requests
        # So we track the current request to know where the next data is
        # being returned from.
        # This is used to buffer chunks which we couldn't process yet
        # It is (start, end, data) tuples.
        # Create an 'unlimited' data stream, so we stop based on requests,
        # rather than just because the data stream ended. This lets us detect
        data_stream = itertools.chain(fp.readv(requests),
                                      itertools.repeat(None))
        for (start, length), data in zip(requests, data_stream):
                if cur_coalesced is not None:
                    raise errors.ShortReadvError(self.relpath,
                                                 start, length, len(data))
            if len(data) != length:
                raise errors.ShortReadvError(self.relpath,
                                             start, length, len(data))
            self._report_activity(length, 'read')
                # This is the first request, just buffer it
                buffered_data = [data]
                buffered_len = length
            elif start == last_end:
                # The data we are reading fits neatly on the previous
                # buffer, so this is all part of a larger coalesced range.
                buffered_data.append(data)
                buffered_len += length
                # We have an 'interrupt' in the data stream. So we know we are
                # at a request boundary.
                    # We haven't consumed the buffer so far, so put it into
                    # data_chunks, and continue.
                    buffered = ''.join(buffered_data)
                    data_chunks.append((input_start, buffered))
                buffered_data = [data]
                buffered_len = length
            last_end = start + length
            if input_start == cur_offset and cur_size <= buffered_len:
                # Simplify the next steps a bit by transforming buffered_data
                # into a single string. We also have the nice property that
                # when there is only one string ''.join([x]) == x, so there is
                buffered = ''.join(buffered_data)
                # Clean out buffered data so that we keep memory
                # TODO: We *could* also consider the case where cur_offset is
                #       in the buffered range, even though it doesn't *start*
                #       the buffered range. But for packs we pretty much always
                #       read in order, so you won't get any extra data in the
                while (input_start == cur_offset
                       and (buffered_offset + cur_size) <= buffered_len):
                    # We've buffered enough data to process this request, spit it
                    cur_data = buffered[buffered_offset:buffered_offset + cur_size]
                    # move the direct pointer into our buffered data
                    buffered_offset += cur_size
                    # Move the start-of-buffer pointer
                    input_start += cur_size
                    # Yield the requested data
                    yield cur_offset, cur_data
                    cur_offset, cur_size = next(offset_iter)
                # at this point, we've consumed as much of buffered as we can,
                # so break off the portion that we consumed
                if buffered_offset == len(buffered_data):
                    # No tail to leave behind
                    buffered = buffered[buffered_offset:]
                    buffered_data = [buffered]
                    buffered_len = len(buffered)
        # now that the data stream is done, close the handle
            buffered = ''.join(buffered_data)
            data_chunks.append((input_start, buffered))
        if 'sftp' in debug.debug_flags:
            mutter('SFTP readv left with %d out-of-order bytes',
                   sum(len(x[1]) for x in data_chunks))
            # We've processed all the readv data, at this point, anything we
            # couldn't process is in data_chunks. This doesn't happen often, so
            # this code path isn't optimized
            # We use an interesting process for data_chunks
            # Specifically if we have "bisect_left([(start, len, entries)],
            # If start == qstart, then we get the specific node. Otherwise we
            # get the previous node
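            # Worked example (illustrative): with data_chunks ==
            # [(0, '12345'), (10, 'abcde')], bisect_left(data_chunks, (10,))
            # returns 1 and data_chunks[1][0] == 10, so that chunk is used
            # directly; for cur_offset == 12 it returns 2, so the code falls
            # back to the previous chunk and slices it from offset 12 - 10 == 2.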
                idx = bisect.bisect_left(data_chunks, (cur_offset,))
                if idx < len(data_chunks) and data_chunks[idx][0] == cur_offset:
                    # The data starts here
                    data = data_chunks[idx][1][:cur_size]
                    # The data is in a portion of a previous page
                    sub_offset = cur_offset - data_chunks[idx][0]
                    data = data_chunks[idx][1]
                    data = data[sub_offset:sub_offset + cur_size]
                    # We are missing the page where the data should be found,
                if len(data) != cur_size:
                    raise AssertionError('We must have miscalculated.'
                                         ' We expected %d bytes, but only found %d'
                                         % (cur_size, len(data)))
                yield cur_offset, data
                cur_offset, cur_size = next(offset_iter)
class SFTPTransport(ConnectedTransport):
    """Transport implementation for SFTP access."""

    # TODO: jam 20060717 Conceivably these could be configurable, either
    #       by auto-tuning at run-time, or by a configuration (per host??)
    #       but the performance curve is pretty flat, so just going with
    #       reasonable defaults.
    _max_readv_combine = 200
    # Having to round trip to the server means waiting for a response,
    # so it is better to download extra bytes.
    # 8KiB had good performance for both local and remote network operations
    _bytes_to_read_before_seek = 8192

    # The sftp spec says that implementations SHOULD allow reads
    # to be at least 32K. paramiko.readv() does an async request
    # for the chunks. So we need to keep it within a single request
    # size for paramiko <= 1.6.1. paramiko 1.6.2 will probably chop
    # up the request itself, rather than us having to worry about it
    _max_request_size = 32768
    def _remote_path(self, relpath):
        """Return the path to be passed along the sftp protocol for relpath.

        :param relpath: is a urlencoded string.
        remote_path = self._parsed_url.clone(relpath).path
        # the initial slash should be removed from the path, and treated as a
        # homedir relative path (the path begins with a double slash if it is
        # absolute). see draft-ietf-secsh-scp-sftp-ssh-uri-03.txt
        # RBC 20060118 we are not using this as it's too user hostile. instead
        # we are following lftp and using /~/foo to mean '~/foo'
        # vila--20070602 and leaving absolute paths to begin with a single slash.
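        # For example (illustrative): a url path of '/~/project/branch' maps
        # to the homedir-relative 'project/branch', while an absolute path
        # such as '/srv/repo' is passed through unchanged.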
        if remote_path.startswith('/~/'):
            remote_path = remote_path[3:]
        elif remote_path == '/~':
    def _create_connection(self, credentials=None):
        """Create a new connection with the provided credentials.

        :param credentials: The credentials needed to establish the connection.
        :return: The created connection and its associated credentials.

        The credentials are only the password as it may have been entered
        interactively by the user and may be different from the one provided
        in the base url at transport creation time.
        if credentials is None:
            password = self._parsed_url.password
            password = credentials

        vendor = ssh._get_ssh_vendor()
        user = self._parsed_url.user
            auth = config.AuthenticationConfig()
            user = auth.get_user('ssh', self._parsed_url.host,
                                 self._parsed_url.port)
        connection = vendor.connect_sftp(self._parsed_url.user, password,
                                         self._parsed_url.host, self._parsed_url.port)
        return connection, (user, password)
    def disconnect(self):
        connection = self._get_connection()
        if connection is not None:

        """Ensures that a connection is established"""
        connection = self._get_connection()
        if connection is None:
            # First connection ever
            connection, credentials = self._create_connection()
            self._set_connection(connection, credentials)

    def has(self, relpath):
        Does the target location exist?
            self._get_sftp().stat(self._remote_path(relpath))
            # stat result is about 20 bytes, let's say
            self._report_activity(20, 'read')
    def get(self, relpath):
        """Get the file at the given relative path.

        :param relpath: The relative path to the file
            path = self._remote_path(relpath)
            f = self._get_sftp().file(path, mode='rb')
            size = f.stat().st_size
            if getattr(f, 'prefetch', None) is not None:
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': error retrieving',
                                         failure_exc=errors.ReadError)

    def get_bytes(self, relpath):
        # reimplement this here so that we can report how many bytes came back
        f = self.get(relpath)
            self._report_activity(len(bytes), 'read')
    def _readv(self, relpath, offsets):
        """See Transport.readv()"""
        # We overload the default readv() because we want to use a file
        # that does not have prefetch enabled.
        # Also, if we have a new paramiko, it implements an async readv()
            path = self._remote_path(relpath)
            fp = self._get_sftp().file(path, mode='rb')
            readv = getattr(fp, 'readv', None)
                return self._sftp_readv(fp, offsets, relpath)
            if 'sftp' in debug.debug_flags:
                mutter('seek and read %s offsets', len(offsets))
            return self._seek_and_read(fp, offsets, relpath)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': error retrieving')

    def recommended_page_size(self):
        """See Transport.recommended_page_size().

        For SFTP we suggest a large page size to reduce the overhead
        introduced by latency.

    def _sftp_readv(self, fp, offsets, relpath):
        """Use the readv() member of fp to do async readv.

        Then read them using paramiko.readv(). paramiko.readv()
        does not support ranges > 64K, so it caps the request size, and
        just reads until it gets all the stuff it wants.
        helper = _SFTPReadvHelper(offsets, relpath, self._report_activity)
        return helper.request_and_yield_offsets(fp)
    def put_file(self, relpath, f, mode=None):
        Copy the file-like object into the location.

        :param relpath: Location to put the contents, relative to base.
        :param f: File-like object.
        :param mode: The final mode for the file
        final_path = self._remote_path(relpath)
        return self._put(final_path, f, mode=mode)
    def _put(self, abspath, f, mode=None):
        """Helper function so both put() and copy_abspaths can reuse the code"""
        tmp_abspath = '%s.tmp.%.9f.%d.%d' % (abspath, time.time(),
                                             os.getpid(), random.randint(0,0x7FFFFFFF))
        fout = self._sftp_open_exclusive(tmp_abspath, mode=mode)
                fout.set_pipelined(True)
                length = self._pump(f, fout)
            except (IOError, paramiko.SSHException) as e:
                self._translate_io_exception(e, tmp_abspath)
            # XXX: This doesn't truly help like we would like it to.
            #      The problem is that openssh strips sticky bits. So while we
            #      can properly set group write permission, we lose the group
            #      sticky bit. So it is probably best to stop chmodding, and
            #      just tell users that they need to set the umask correctly.
            #      The attr.st_mode = mode, in _sftp_open_exclusive
            #      will handle when the user wants the final mode to be more
            #      restrictive. And then we avoid a round trip. Unless
            #      paramiko decides to expose an async chmod()

            # This is designed to chmod() right before we close.
            # Because we set_pipelined() earlier, theoretically we might
            # avoid the round trip for fout.close()
                self._get_sftp().chmod(tmp_abspath, mode)
            self._rename_and_overwrite(tmp_abspath, abspath)
        except Exception as e:
            # If we fail, try to clean up the temporary file
            # before we throw the exception,
            # but don't let another exception mess things up.
            # Write out the traceback, because otherwise
            # the catch and throw destroys it.
            mutter(traceback.format_exc())
                self._get_sftp().remove(tmp_abspath)
                # raise the saved exception
            # raise the original with its traceback if we can.
    def _put_non_atomic_helper(self, relpath, writer, mode=None,
                               create_parent_dir=False,
        abspath = self._remote_path(relpath)

        # TODO: jam 20060816 paramiko doesn't publicly expose a way to
        #       set the file mode at create time. If it does, use it.
        #       But for now, we just chmod later anyway.

        def _open_and_write_file():
            """Try to open the target file, raise error on failure"""
                    fout = self._get_sftp().file(abspath, mode='wb')
                    fout.set_pipelined(True)
                except (paramiko.SSHException, IOError) as e:
                    self._translate_io_exception(e, abspath,
                # This is designed to chmod() right before we close.
                # Because we set_pipelined() earlier, theoretically we might
                # avoid the round trip for fout.close()
                    self._get_sftp().chmod(abspath, mode)

        if not create_parent_dir:
            _open_and_write_file()

        # Try error handling to create the parent directory if we need to
            _open_and_write_file()
            # Try to create the parent directory, and then go back to
            parent_dir = os.path.dirname(abspath)
            self._mkdir(parent_dir, dir_mode)
            _open_and_write_file()
    def put_file_non_atomic(self, relpath, f, mode=None,
                            create_parent_dir=False,
        """Copy the file-like object into the target location.

        This function is not strictly safe to use. It is only meant to
        be used when you already know that the target does not exist.
        It is not safe, because it will open and truncate the remote
        file. So there may be a time when the file has invalid contents.

        :param relpath: The remote location to put the contents.
        :param f: File-like object.
        :param mode: Possible access permissions for new file.
            None means do not set remote permissions.
        :param create_parent_dir: If we cannot create the target file because
            the parent directory does not exist, go ahead and
            create it, and then try again.
        self._put_non_atomic_helper(relpath, writer, mode=mode,
                                    create_parent_dir=create_parent_dir,

    def put_bytes_non_atomic(self, relpath, raw_bytes, mode=None,
                             create_parent_dir=False,
        if not isinstance(raw_bytes, str):
                'raw_bytes must be a plain string, not %s' % type(raw_bytes))
            fout.write(raw_bytes)
        self._put_non_atomic_helper(relpath, writer, mode=mode,
                                    create_parent_dir=create_parent_dir,
    def iter_files_recursive(self):
        """Walk the relative paths of all files in this transport."""
        # progress is handled by list_dir
        queue = list(self.list_dir('.'))
            relpath = queue.pop(0)
            st = self.stat(relpath)
            if stat.S_ISDIR(st.st_mode):
                for i, basename in enumerate(self.list_dir(relpath)):
                    queue.insert(i, relpath+'/'+basename)
    def _mkdir(self, abspath, mode=None):
            self._report_activity(len(abspath), 'write')
            self._get_sftp().mkdir(abspath, local_mode)
            self._report_activity(1, 'read')
                # chmod a dir through sftp will erase any sgid bit set
                # on the server side. So, if the mode bits are already
                # set, avoid the chmod. If the mode is not fine but
                # the sgid bit is set, report a warning to the user
                # with the umask fix.
                stat = self._get_sftp().lstat(abspath)
                mode = mode & 0o777 # can't set special bits anyway
                if mode != stat.st_mode & 0o777:
                    if stat.st_mode & 0o6000:
                        warning('About to chmod %s over sftp, which will result'
                                ' in its suid or sgid bits being cleared. If'
                                ' you want to preserve those bits, change your'
                                ' environment on the server to use umask 0%03o.'
                                % (abspath, 0o777 - mode))
                    self._get_sftp().chmod(abspath, mode=mode)
        except (paramiko.SSHException, IOError) as e:
            self._translate_io_exception(e, abspath, ': unable to mkdir',
                                         failure_exc=FileExists)
    def mkdir(self, relpath, mode=None):
        """Create a directory at the given path."""
        self._mkdir(self._remote_path(relpath), mode=mode)

    def open_write_stream(self, relpath, mode=None):
        """See Transport.open_write_stream."""
        # initialise the file to zero-length
        # this is three round trips, but we don't use this
        # api more than once per write_group at the moment so
        # it is a tolerable overhead. Better would be to truncate
        # the file after opening. RBC 20070805
        self.put_bytes_non_atomic(relpath, "", mode)
        abspath = self._remote_path(relpath)
        # TODO: jam 20060816 paramiko doesn't publicly expose a way to
        #       set the file mode at create time. If it does, use it.
        #       But for now, we just chmod later anyway.
            handle = self._get_sftp().file(abspath, mode='wb')
            handle.set_pipelined(True)
        except (paramiko.SSHException, IOError) as e:
            self._translate_io_exception(e, abspath,
        _file_streams[self.abspath(relpath)] = handle
        return FileFileStream(self, relpath, handle)
    def _translate_io_exception(self, e, path, more_info='',
                                failure_exc=PathError):
        """Translate a paramiko or IOError into a friendlier exception.

        :param e: The original exception
        :param path: The path in question when the error is raised
        :param more_info: Extra information that can be included,
            such as what was going on
        :param failure_exc: Paramiko has the super fun ability to raise completely
            opaque errors that just set "e.args = ('Failure',)" with
            If this parameter is set, it defines the exception
            to raise in these cases.
        # paramiko seems to generate detailless errors.
        self._translate_error(e, path, raise_generic=False)
        if getattr(e, 'args', None) is not None:
            if (e.args == ('No such file or directory',) or
                e.args == ('No such file',)):
                raise NoSuchFile(path, str(e) + more_info)
            if (e.args == ('mkdir failed',) or
                e.args[0].startswith('syserr: File exists')):
                raise FileExists(path, str(e) + more_info)
            # strange but true, for the paramiko server.
            if (e.args == ('Failure',)):
                raise failure_exc(path, str(e) + more_info)
            # Can be something like args = ('Directory not empty:
            # '/srv/bazaar.launchpad.net/blah...: '
            # [Errno 39] Directory not empty',)
            if (e.args[0].startswith('Directory not empty: ')
                or getattr(e, 'errno', None) == errno.ENOTEMPTY):
                raise errors.DirectoryNotEmpty(path, str(e))
            if e.args == ('Operation unsupported',):
                raise errors.TransportNotPossible()
            mutter('Raising exception with args %s', e.args)
        if getattr(e, 'errno', None) is not None:
            mutter('Raising exception with errno %s', e.errno)
    def append_file(self, relpath, f, mode=None):
        Append the text in the file-like object into the final
            path = self._remote_path(relpath)
            fout = self._get_sftp().file(path, 'ab')
                self._get_sftp().chmod(path, mode)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, relpath, ': unable to append')
    def rename(self, rel_from, rel_to):
        """Rename without special overwriting"""
            self._get_sftp().rename(self._remote_path(rel_from),
                                    self._remote_path(rel_to))
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, rel_from,
                                         ': unable to rename to %r' % (rel_to))

    def _rename_and_overwrite(self, abs_from, abs_to):
        """Do a fancy rename on the remote server.

        Using the implementation provided by osutils.
            sftp = self._get_sftp()
            fancy_rename(abs_from, abs_to,
                         rename_func=sftp.rename,
                         unlink_func=sftp.remove)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, abs_from,
                                         ': unable to rename to %r' % (abs_to))

    def move(self, rel_from, rel_to):
        """Move the item at rel_from to the location at rel_to"""
        path_from = self._remote_path(rel_from)
        path_to = self._remote_path(rel_to)
        self._rename_and_overwrite(path_from, path_to)
    def delete(self, relpath):
        """Delete the item at relpath"""
        path = self._remote_path(relpath)
            self._get_sftp().remove(path)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': unable to delete')

    def external_url(self):
        """See breezy.transport.Transport.external_url."""
        # the external path for SFTP is the base

        """Return True if this store supports listing."""

    def list_dir(self, relpath):
        Return a list of all files at the given location.
        # does anything actually use this?
        # This is at least used by copy_tree for remote upgrades.
        # -- David Allouche 2006-08-11
        path = self._remote_path(relpath)
            entries = self._get_sftp().listdir(path)
            self._report_activity(sum(map(len, entries)), 'read')
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': failed to list_dir')
        return [urlutils.escape(entry) for entry in entries]
    def rmdir(self, relpath):
        """See Transport.rmdir."""
        path = self._remote_path(relpath)
            return self._get_sftp().rmdir(path)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': failed to rmdir')

    def stat(self, relpath):
        """Return the stat information for a file."""
        path = self._remote_path(relpath)
            return self._get_sftp().lstat(path)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': unable to stat')

    def readlink(self, relpath):
        """See Transport.readlink."""
        path = self._remote_path(relpath)
            return self._get_sftp().readlink(path)
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, path, ': unable to readlink')

    def symlink(self, source, link_name):
        """See Transport.symlink."""
            conn = self._get_sftp()
            sftp_retval = conn.symlink(source, link_name)
            if SFTP_OK != sftp_retval:
                raise TransportError(
                    '%r: unable to create symlink to %r' % (link_name, source),
        except (IOError, paramiko.SSHException) as e:
            self._translate_io_exception(e, link_name,
                                         ': unable to create symlink to %r' % (source))
    def lock_read(self, relpath):
        Lock the given file for shared (read) access.

        :return: A lock object, which has an unlock() member function
        # FIXME: there should be something clever i can do here...
        class BogusLock(object):
            def __init__(self, path):
        return BogusLock(relpath)

    def lock_write(self, relpath):
        Lock the given file for exclusive (write) access.
        WARNING: many transports do not support this, so try to avoid using it.

        :return: A lock object, which has an unlock() member function
        # This is a little bit bogus, but basically, we create a file
        # which should not already exist, and if it does, we assume
        # that there is a lock, and if it doesn't, then we assume
        # that we have taken the lock.
        return SFTPLock(relpath, self)
    def _sftp_open_exclusive(self, abspath, mode=None):
        """Open a remote path exclusively.

        SFTP supports O_EXCL (SFTP_FLAG_EXCL), which fails if
        the file already exists. However it does not expose this
        at the higher level of SFTPClient.open(), so we have to

        WARNING: This breaks the SFTPClient abstraction, so it
        could easily break against an updated version of paramiko.

        :param abspath: The remote absolute path where the file should be opened
        :param mode: The mode permissions bits for the new file
        # TODO: jam 20060816 Paramiko >= 1.6.2 (probably earlier) supports
        #       using the 'x' flag to indicate SFTP_FLAG_EXCL.
        #       However, there is no way to set the permission mode at open
        #       time using the sftp_client.file() functionality.
        path = self._get_sftp()._adjust_cwd(abspath)
        # mutter('sftp abspath %s => %s', abspath, path)
        attr = SFTPAttributes()
        omode = (SFTP_FLAG_WRITE | SFTP_FLAG_CREATE
                 | SFTP_FLAG_TRUNC | SFTP_FLAG_EXCL)
            t, msg = self._get_sftp()._request(CMD_OPEN, path, omode, attr)
                raise TransportError('Expected an SFTP handle')
            handle = msg.get_string()
            return SFTPFile(self._get_sftp(), handle, 'wb', -1)
        except (paramiko.SSHException, IOError) as e:
            self._translate_io_exception(e, abspath, ': unable to open',
                                         failure_exc=FileExists)
    def _can_roundtrip_unix_modebits(self):
        if sys.platform == 'win32':


def get_test_permutations():
    """Return the permutations to be used in testing."""
    from ..tests import stub_sftp
    return [(SFTPTransport, stub_sftp.SFTPAbsoluteServer),
            (SFTPTransport, stub_sftp.SFTPHomeDirServer),
            (SFTPTransport, stub_sftp.SFTPSiblingAbsoluteServer),