# Copyright (C) 2005-2011 Canonical Ltd
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from .lazy_import import lazy_import
lazy_import(globals(), """
from datetime import datetime

# We need to import both shutil and rmtree as we export the latter on posix
# and need the former on windows
from shutil import rmtree

# We need to import both tempfile and mkdtemp as we export the latter on posix
# and need the former on windows
from tempfile import mkdtemp

from breezy.i18n import gettext

# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
class UnsupportedTimezoneFormat(errors.BzrError):

    _fmt = ('Unsupported timezone format "%(timezone)s", '
            'options are "utc", "original", "local".')

    def __init__(self, timezone):
        self.timezone = timezone


def get_unicode_argv():


def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        chmod_if_possible(filename, mod)


def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        chmod_if_possible(filename, mod)


def chmod_if_possible(filename, mode):
    # Set file mode if that can be safely done.
    # Sometimes even on unix the filesystem won't allow it - see
    # https://bugs.launchpad.net/bzr/+bug/606537
    try:
        # It is probably faster to just do the chmod, rather than
        # doing a stat, and then trying to compare
        os.chmod(filename, mode)
    except (IOError, OSError) as e:
        # Permission/access denied seems to commonly happen on smbfs; there's
        # probably no point warning about it.
        # <https://bugs.launchpad.net/bzr/+bug/606537>
        if getattr(e, 'errno') in (errno.EPERM, errno.EACCES):
            trace.mutter("ignore error on chmod of %r: %r" % (
                filename, e))


def minimum_path_selection(paths):
    """Return the smallest subset of paths which are outside paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.
    """
    def sort_key(path):
        if isinstance(path, bytes):
            return path.split(b'/')
        return path.split('/')
    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
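# Illustrative sketch (assumed behaviour, not taken from the original source):
#   minimum_path_selection({'a/b', 'a/b/c', 'd'}) -> {'a/b', 'd'}
# 'a/b/c' is dropped because is_inside('a/b', 'a/b/c') is True.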
    """Return a quoted filename.

    This previously used backslash quoting, but that works poorly on
    # TODO: I'm not really sure this is the best format either.
    if _QUOTE_RE is None:
        _QUOTE_RE = re.compile(r'([^a-zA-Z0-9.,:/\\_~-])')
    if _QUOTE_RE.search(f):


_directory_kind = 'directory'

    """Return the current umask"""
    # Assume that people aren't messing with the umask while running
    # XXX: This is not thread safe, but there is no way to get the
    # umask without setting it

    _directory_kind: "/",
    'tree-reference': '+',


def kind_marker(kind):
    return _kind_marker_map[kind]
    # Slightly faster than using .get(, '') when the common case is that


lexists = getattr(os.path, 'lexists', None)

        stat = getattr(os, 'lstat', os.stat)
        if e.errno == errno.ENOENT:
        raise errors.BzrError(
            gettext("lstat/stat of ({0!r}): {1!r}").format(f, e))


def fancy_rename(old, new, rename_func, unlink_func):
    """A fancy rename, when you don't have atomic rename.

    :param old: The old path, to rename from
    :param new: The new path, to rename to
    :param rename_func: The potentially non-atomic rename function
    :param unlink_func: A way to delete the target file if the full rename
    # sftp rename doesn't allow overwriting, so play tricks:
    base = os.path.basename(new)
    dirname = os.path.dirname(new)
    # callers use different encodings for the paths so the following MUST
    # respect that. We rely on python upcasting to unicode if new is unicode
    # and keeping a str if not.
    tmp_name = 'tmp.%s.%.9f.%d.%s' % (base, time.time(),
                                      os.getpid(), rand_chars(10))
    tmp_name = pathjoin(dirname, tmp_name)

    # Rename the file out of the way, but keep track if it didn't exist
    # We don't want to grab just any exception
    # something like EACCES should prevent us from continuing
    # The downside is that the rename_func has to throw an exception
    # with an errno = ENOENT, or NoSuchFile
    try:
        rename_func(new, tmp_name)
    except (errors.NoSuchFile,):
        # RBC 20060103 abstraction leakage: the paramiko SFTP clients rename
        # function raises an IOError whose errno is None when a rename fails.
        # This then gets caught here.
        if e.errno not in (None, errno.ENOENT, errno.ENOTDIR):
    except Exception as e:
        if (getattr(e, 'errno', None) is None
                or e.errno not in (errno.ENOENT, errno.ENOTDIR)):

    try:
        # This may throw an exception, in which case success will
        rename_func(old, new)
    except (IOError, OSError) as e:
        # source and target may be aliases of each other (e.g. on a
        # case-insensitive filesystem), so we may have accidentally renamed
        # source when we tried to rename target
        if (file_existed and e.errno in (None, errno.ENOENT)
                and old.lower() == new.lower()):
            # source and target are the same file on a case-insensitive
            # filesystem, so we don't generate an exception

    # If the file used to exist, rename it back into place
    # otherwise just delete it from the tmp location
    unlink_func(tmp_name)
    rename_func(tmp_name, new)
# In Python 2.4.2 and older, os.path.abspath and os.path.realpath
# choke on a Unicode string containing a relative path if
# os.getcwd() returns a non-sys.getdefaultencoding()-encoded


def _posix_abspath(path):
    # jam 20060426 rather than encoding to fsencoding
    # copy posixpath.abspath, but use os.getcwdu instead
    if not posixpath.isabs(path):
        path = posixpath.join(getcwd(), path)
    return _posix_normpath(path)


def _posix_realpath(path):
    return posixpath.realpath(path.encode(_fs_enc)).decode(_fs_enc)


def _posix_normpath(path):
    path = posixpath.normpath(path)
    # Bug 861008: posixpath.normpath() returns a path normalized according to
    # the POSIX standard, which stipulates (for compatibility reasons) that two
    # leading slashes must not be simplified to one, and only if there are 3 or
    # more should they be simplified as one. So we treat the leading 2 slashes
    # as a special case here by simply removing the first slash, as we consider
    # that breaking POSIX compatibility for this obscure feature is acceptable.
    # This is not a paranoid precaution, as we notably get paths like this when
    # the repo is hosted at the root of the filesystem, i.e. in "/".
    if path.startswith('//'):
        path = path[1:]
    return path
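# Illustrative example (assumed, not from the original source), following the
# comment above about collapsing exactly two leading slashes:
#   _posix_normpath('//hosted/repo') -> '/hosted/repo'
# A single leading slash is left untouched, as posixpath.normpath already
# handles that case.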
def _posix_path_from_environ(key):
    """Get unicode path from `key` in environment or None if not present

    Note that posix systems use arbitrary byte strings for filesystem objects,
    so a path that raises BadFilenameEncoding here may still be accessible.
    """
    return os.environ.get(key, None)


def _posix_get_home_dir():
    """Get the home directory of the current user as a unicode path"""
    path = posixpath.expanduser("~")
    try:
        return path.decode(_fs_enc)
    except AttributeError:
    except UnicodeDecodeError:
        raise errors.BadFilenameEncoding(path, _fs_enc)


def _posix_getuser_unicode():
    """Get username from environment or password database as unicode"""
    return getpass.getuser()


def _win32_fixdrive(path):
    """Force drive letters to be consistent.

    win32 is inconsistent whether it returns lower or upper case
    and even if it was consistent the user might type the other
    so we force it to uppercase
    running python.exe under cmd.exe returns a capital C:\\
    running win32 python inside a cygwin shell returns lowercase c:\\
    """
    drive, path = ntpath.splitdrive(path)
    return drive.upper() + path
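# Illustrative example (assumed, not from the original source):
#   _win32_fixdrive('c:/some/path') -> 'C:/some/path'
# Only the drive letter is upcased; the rest of the path is left alone.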
def _win32_abspath(path):
    # Real ntpath.abspath doesn't have a problem with a unicode cwd
    return _win32_fixdrive(ntpath.abspath(path).replace('\\', '/'))


def _win32_realpath(path):
    # Real ntpath.realpath doesn't have a problem with a unicode cwd
    return _win32_fixdrive(ntpath.realpath(path).replace('\\', '/'))


def _win32_pathjoin(*args):
    return ntpath.join(*args).replace('\\', '/')


def _win32_normpath(path):
    return _win32_fixdrive(ntpath.normpath(path).replace('\\', '/'))


def _win32_getcwd():
    return _win32_fixdrive(_getcwd().replace('\\', '/'))


def _win32_mkdtemp(*args, **kwargs):
    return _win32_fixdrive(tempfile.mkdtemp(*args, **kwargs).replace('\\', '/'))


def _win32_rename(old, new):
    """We expect to be able to atomically replace 'new' with 'old'.

    On win32, if new exists, it must be moved out of the way first,
    """
    fancy_rename(old, new, rename_func=os.rename, unlink_func=os.unlink)
    if e.errno in (errno.EPERM, errno.EACCES, errno.EBUSY, errno.EINVAL):
        # If we try to rename a non-existent file onto cwd, we get
        # EPERM or EACCES instead of ENOENT, this will raise ENOENT
        # if the old path doesn't exist, sometimes we get EACCES
        # On Linux, we seem to get EBUSY, on Mac we get EINVAL

    return unicodedata.normalize('NFC', _getcwd())


def _rename_wrap_exception(rename_func):
    """Adds extra information to any exceptions that come from rename().

    The exception has an updated message and 'old_filename' and 'new_filename'
    """
    def _rename_wrapper(old, new):
        rename_func(old, new)
        detailed_error = OSError(e.errno, e.strerror +
                                 " [occurred when renaming '%s' to '%s']" %
        detailed_error.old_filename = old
        detailed_error.new_filename = new

    return _rename_wrapper


# Default rename wraps os.rename()
rename = _rename_wrap_exception(os.rename)

# Default is to just use the python builtins, but these can be rebound on
# particular platforms.
abspath = _posix_abspath
realpath = _posix_realpath
pathjoin = os.path.join
normpath = _posix_normpath
path_from_environ = _posix_path_from_environ
_get_home_dir = _posix_get_home_dir
getuser_unicode = _posix_getuser_unicode
dirname = os.path.dirname
basename = os.path.basename
split = os.path.split
splitext = os.path.splitext
# These were already lazily imported into local scope
# mkdtemp = tempfile.mkdtemp
# rmtree = shutil.rmtree

MIN_ABS_PATHLENGTH = 1


if sys.platform == 'win32':
    abspath = _win32_abspath
    realpath = _win32_realpath
    pathjoin = _win32_pathjoin
    normpath = _win32_normpath
    getcwd = _win32_getcwd
    mkdtemp = _win32_mkdtemp
    rename = _rename_wrap_exception(_win32_rename)
    from . import _walkdirs_win32
    lstat = _walkdirs_win32.lstat
    fstat = _walkdirs_win32.fstat
    wrap_stat = _walkdirs_win32.wrap_stat
    MIN_ABS_PATHLENGTH = 3
    def _win32_delete_readonly(function, path, excinfo):
        """Error handler for shutil.rmtree function [for win32]
        Helps to remove files and dirs marked as read-only.
        """
        exception = excinfo[1]
        if function in (os.remove, os.rmdir) \
                and isinstance(exception, OSError) \
                and exception.errno == errno.EACCES:

    def rmtree(path, ignore_errors=False, onerror=_win32_delete_readonly):
        """Replacer for shutil.rmtree: could remove readonly dirs/files"""
        return shutil.rmtree(path, ignore_errors, onerror)

    get_unicode_argv = getattr(win32utils, 'get_unicode_argv', get_unicode_argv)
    path_from_environ = win32utils.get_environ_unicode
    _get_home_dir = win32utils.get_home_location
    getuser_unicode = win32utils.get_user_name

elif sys.platform == 'darwin':


def get_terminal_encoding(trace=False):
    """Find the best encoding for printing to the screen.

    This attempts to check both sys.stdout and sys.stdin to see
    what encoding they are in, and if that fails it falls back to
    osutils.get_user_encoding().
    The problem is that on Windows, locale.getpreferredencoding()
    is not the same encoding as that used by the console:
    http://mail.python.org/pipermail/python-list/2003-May/162357.html
    On my standard US Windows XP, the preferred encoding is
    cp1252, but the console is cp437

    :param trace: If True trace the selected encoding via mutter().
    """
    from .trace import mutter
    output_encoding = getattr(sys.stdout, 'encoding', None)
    if not output_encoding:
        input_encoding = getattr(sys.stdin, 'encoding', None)
        if not input_encoding:
            output_encoding = get_user_encoding()
            mutter('encoding stdout as osutils.get_user_encoding() %r',
            output_encoding = input_encoding
            mutter('encoding stdout as sys.stdin encoding %r',
        mutter('encoding stdout as sys.stdout encoding %r', output_encoding)
    if output_encoding == 'cp0':
        # invalid encoding (cp0 means 'no codepage' on Windows)
        output_encoding = get_user_encoding()
        mutter('cp0 is invalid encoding.'
               ' encoding stdout as osutils.get_user_encoding() %r',
    codecs.lookup(output_encoding)
    sys.stderr.write('brz: warning:'
                     ' unknown terminal encoding %s.\n'
                     ' Using encoding %s instead.\n'
                     % (output_encoding, get_user_encoding())
    output_encoding = get_user_encoding()

    return output_encoding
def normalizepath(f):
    if getattr(os.path, 'realpath', None) is not None:
    [p, e] = os.path.split(f)
    if e == "" or e == "." or e == "..":
    return pathjoin(F(p), e)


def isdir(f):
    """True if f is an accessible directory."""
    return stat.S_ISDIR(os.lstat(f)[stat.ST_MODE])


def isfile(f):
    """True if f is a regular file."""
    return stat.S_ISREG(os.lstat(f)[stat.ST_MODE])


def islink(f):
    """True if f is a symlink."""
    return stat.S_ISLNK(os.lstat(f)[stat.ST_MODE])


def is_inside(dir, fname):
    """True if fname is inside dir.

    The parameters should typically be passed to osutils.normpath first, so
    that . and .. and repeated slashes are eliminated, and the separators
    are canonical for the platform.

    The empty string as a dir name is taken as top-of-tree and matches
    """
    # XXX: Most callers of this can actually do something smarter by
    # looking at the inventory
    if isinstance(dir, bytes):
        if not dir.endswith(b'/'):
    if not dir.endswith('/'):

    return fname.startswith(dir)
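# Illustrative examples (assumed, not from the original source):
#   is_inside('src', 'src/foo.c')   -> True
#   is_inside('src', 'srccontrol')  -> False (a trailing '/' is appended first)
#   is_inside('', 'anything')       -> True  (empty dir matches everything,
#                                             per the docstring above)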
def is_inside_any(dir_list, fname):
    """True if fname is inside any of given dirs."""
    for dirname in dir_list:
        if is_inside(dirname, fname):


def is_inside_or_parent_of_any(dir_list, fname):
    """True if fname is a child or a parent of any of the given files."""
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):


def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.
    """
    # read specified number of bytes
    while read_length > 0:
        num_bytes_to_read = min(read_length, buff_size)
        block = from_file.read(num_bytes_to_read)
        if report_activity is not None:
            report_activity(len(block), direction)
        actual_bytes_read = len(block)
        read_length -= actual_bytes_read
        length += actual_bytes_read

    block = from_file.read(buff_size)
    if report_activity is not None:
        report_activity(len(block), direction)


def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.
    """
    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
    segment_size = 5242880 # 5MB
    offsets = range(0, len(bytes), segment_size)
    view = memoryview(bytes)
    write = file_handle.write
    for offset in offsets:
        write(view[offset:offset + segment_size])
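# Illustrative usage (assumed, not from the original source): write a large
# byte string in 5MB slices so that no single huge write() is ever issued:
#   with open('out.bin', 'wb') as f:
#       pump_string_file(data, f)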
def file_iterator(input_file, readsize=32768):
        b = input_file.read(readsize)


# GZ 2017-09-16: Makes sense in general for hexdigest() result to be text, but
# used as bytes through most interfaces so encode with this wrapper.
def _hexdigest(hashobj):
    return hashobj.hexdigest().encode()


    """Calculate the hexdigest of an open file.

    The file cursor should be already at the start.


def size_sha_file(f):
    """Calculate the size and hexdigest of an open file.

    The file cursor should be already at the start and
    the caller is responsible for closing the file afterwards.
    """
    return size, _hexdigest(s)


def sha_file_by_name(fname):
    """Calculate the SHA1 of a file by reading the full text"""
    f = os.open(fname, os.O_RDONLY | O_BINARY | O_NOINHERIT)
        b = os.read(f, 1 << 16)


def sha_strings(strings, _factory=sha):
    """Return the sha-1 of concatenation of strings"""
    for string in strings:


def sha_string(f, _factory=sha):
    # GZ 2017-09-16: Dodgy if factory is ever not sha, probably shouldn't be.
    return _hexdigest(_factory(f))


def fingerprint_file(f):
    return {'size': len(b),
            'sha1': _hexdigest(sha(b))}


def compare_files(a, b):
    """Returns true if equal in contents"""


def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds


weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]


def format_date(t, offset=0, timezone='original', date_fmt=None,
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str
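# Illustrative output (assumed, not from the original source): with the
# default format this yields something like
#   'Thu 2011-04-07 10:52:13 +0100'
# where the trailing ' +0100' is offset_str when show_offset is True.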
# Cache of formatted offset strings
_offset_cache = {}


def format_date_with_offset_in_original_timezone(t, offset=0,
                                                 _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    """
    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str
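# Illustrative offset formatting (assumed, not from the original source):
#   offset=3600   -> offset_str == ' +0100'   (hours, then minutes)
#   offset=-18000 -> offset_str == ' -0500'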
def format_local_date(t, offset=0, timezone='original', date_fmt=None,
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, str):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str


def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
    elif timezone == 'original':
        tt = time.gmtime(t + offset)
    elif timezone == 'local':
        tt = time.localtime(t)
        offset = local_time_offset(t)
        raise UnsupportedTimezoneFormat(timezone)
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    return (date_fmt, tt, offset_str)


def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))


def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution
    """
    direction = 'in the future'
    if seconds < 90: # print seconds up to 90 seconds
            return '%d second %s' % (seconds, direction,)
        return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if minutes < 90: # print minutes, seconds up to 90 minutes
            return '%d minute, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)
        return '%d minutes, %d second%s %s' % (
            minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
    """Return size of given open file."""
    return os.fstat(f.fileno())[stat.ST_SIZE]


# Alias os.urandom to support platforms (which?) without /dev/urandom and
# override if it doesn't work. Avoid checking on windows where there is
# significant initialisation cost that can be avoided for some bzr calls.

rand_bytes = os.urandom

if rand_bytes.__module__ != "nt":
    except NotImplementedError:
        # not well seeded, but better than nothing
            s += chr(random.randint(0, 255))


ALNUM = '0123456789abcdefghijklmnopqrstuvwxyz'


def rand_chars(num):
    """Return a random string of num alphanumeric characters

    The result only contains lowercase chars because it may be used on
    case-insensitive filesystems.
    """
    for raw_byte in rand_bytes(num):
        s += ALNUM[raw_byte % 36]
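# Illustrative result (assumed, not from the original source): rand_chars(8)
# might return something like '3f0kz9qa' - always lowercase, so it is safe on
# case-insensitive filesystems as the docstring above notes.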
# TODO: We could later have path objects that remember their list
# decomposition (might be too tricksy though.)


def splitpath(p):
    """Turn string into list of parts."""
    use_bytes = isinstance(p, bytes)
    if os.path.sep == '\\':
        # split on either delimiter because people might use either on
        ps = re.split(b'[\\\\/]', p)
        ps = re.split(r'[\\/]', p)

    current_empty_dir = (b'.', b'')
    current_empty_dir = ('.', '')

        raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)
        elif f in current_empty_dir:

    if (f == '..') or (f is None) or (f == ''):
        raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)


def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].
    """
    parts = splitpath(dirname(filename))
        parents.append(joinpath(parts))


_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension. If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> import breezy._fictional_extension_pyx
    >>> except ImportError as e:
    >>> breezy.osutils.failed_to_load_extension(e)
    >>> import breezy._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --

    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)


def report_extension_load_failures():
    if not _extension_load_failures:
    if config.GlobalConfig().suppress_warning('missing_extensions'):
    # the warnings framework should by default show this only once
    from .trace import warning
        "brz: warning: some compiled extensions could not be loaded; "
        "see ``brz help missing-extensions``")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529


try:
    from ._chunks_to_lines_pyx import chunks_to_lines
except ImportError as e:
    failed_to_load_extension(e)
    from ._chunks_to_lines_py import chunks_to_lines


    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, bytes):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
    return _split_lines(s)


def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    nl = b'\n' if isinstance(s, bytes) else u'\n'
    result = [line + nl for line in lines[:-1]]
    result.append(lines[-1])


def hardlinks_good():
    return sys.platform not in ('win32', 'cygwin', 'darwin')
def link_or_copy(src, dest):
    """Hardlink a file, or copy it if it can't be hardlinked."""
    if not hardlinks_good():
        shutil.copyfile(src, dest)
    except (OSError, IOError) as e:
        if e.errno != errno.EXDEV:
        shutil.copyfile(src, dest)


def delete_any(path):
    """Delete a file, symlink or directory.

    Will delete even if readonly.
    """
    try:
        _delete_file_or_dir(path)
    except (OSError, IOError) as e:
        if e.errno in (errno.EPERM, errno.EACCES):
            # make writable and try again
            except (OSError, IOError):
            _delete_file_or_dir(path)


def _delete_file_or_dir(path):
    # Look Before You Leap (LBYL) is appropriate here instead of Easier to Ask for
    # Forgiveness than Permission (EAFP) because:
    # - root can damage a solaris file system by using unlink,
    # - unlink raises different exceptions on different OSes (linux: EISDIR, win32:
    #   EACCES, OSX: EPERM) when invoked on a directory.
    if isdir(path): # Takes care of symlinks


def has_symlinks():
    if getattr(os, 'symlink', None) is not None:


def has_hardlinks():
    if getattr(os, 'link', None) is not None:


def host_os_dereferences_symlinks():
    return (has_symlinks()
            and sys.platform not in ('cygwin', 'win32'))


def readlink(abspath):
    """Return a string representing the path to which the symbolic link points.

    :param abspath: The link absolute unicode path.

    This is guaranteed to return the symbolic link in unicode in all python
    """
    link = abspath.encode(_fs_enc)
    target = os.readlink(link)
    target = target.decode(_fs_enc)


def contains_whitespace(s):
    """True if there are any whitespace characters in s."""
    # string.whitespace can include '\xa0' in certain locales, because it is
    # considered "non-breaking-space" as part of ISO-8859-1. But it
    # 1) Isn't a breaking whitespace
    # 2) Isn't one of ' \t\r\n' which are characters we sometimes use as
    # 3) '\xa0' isn't unicode safe since it is >128.
    if isinstance(s, str):
    ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')


def contains_linebreaks(s):
    """True if there is any vertical whitespace in s."""


def relpath(base, path):
    """Return path relative to base, or raise PathNotChild exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'. This
    avoids that problem.

    NOTE: `base` should not have a trailing slash otherwise you'll get
    PathNotChild exceptions regardless of `path`.
    """
    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError(gettext('%r is too short to calculate a relative path')
    if len(head) <= len(base) and head != base:
        raise errors.PathNotChild(rp, base)
        head, tail = split(head)
    return pathjoin(*reversed(s))
def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in
    rel = relpath(base, path)
    # '.' will have been turned into ''
    abs_base = abspath(base)
    _listdir = os.listdir
    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
        try:
            next_entries = _listdir(current)
        except OSError: # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            current = pathjoin(current, bit, *list(bit_iter))
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
    return current[len(abs_base):].lstrip('/')


# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there. For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
else:
    canonical_relpath = relpath


def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]
1334
"""Decode the filename using the filesystem encoding
1336
If it is unicode, it is returned.
1337
Otherwise it is decoded from the the filesystem's encoding. If decoding
1338
fails, a errors.BadFilenameEncoding exception is raised.
1340
if isinstance(filename, str):
1343
return filename.decode(_fs_enc)
1344
except UnicodeDecodeError:
1345
raise errors.BadFilenameEncoding(filename, _fs_enc)
1348
def safe_unicode(unicode_or_utf8_string):
1349
"""Coerce unicode_or_utf8_string into unicode.
1351
If it is unicode, it is returned.
1352
Otherwise it is decoded from utf-8. If decoding fails, the exception is
1353
wrapped in a BzrBadParameterNotUnicode exception.
1355
if isinstance(unicode_or_utf8_string, str):
1356
return unicode_or_utf8_string
1358
return unicode_or_utf8_string.decode('utf8')
1359
except UnicodeDecodeError:
1360
raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
1363
def safe_utf8(unicode_or_utf8_string):
1364
"""Coerce unicode_or_utf8_string to a utf8 string.
1366
If it is a str, it is returned.
1367
If it is Unicode, it is encoded into a utf-8 string.
1369
if isinstance(unicode_or_utf8_string, bytes):
1370
# TODO: jam 20070209 This is overkill, and probably has an impact on
1371
# performance if we are dealing with lots of apis that want a
1374
# Make sure it is a valid utf-8 string
1375
unicode_or_utf8_string.decode('utf-8')
1376
except UnicodeDecodeError:
1377
raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
1378
return unicode_or_utf8_string
1379
return unicode_or_utf8_string.encode('utf-8')
1382
def safe_revision_id(unicode_or_utf8_string):
1383
"""Revision ids should now be utf8, but at one point they were unicode.
1385
:param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
1387
:return: None or a utf8 revision id.
1389
if (unicode_or_utf8_string is None
1390
or unicode_or_utf8_string.__class__ == bytes):
1391
return unicode_or_utf8_string
1392
raise TypeError('Unicode revision ids are no longer supported. '
1393
'Revision id generators should be creating utf8 revision '
1397
def safe_file_id(unicode_or_utf8_string):
1398
"""File ids should now be utf8, but at one point they were unicode.
1400
This is the same as safe_utf8, except it uses the cached encode functions
1401
to save a little bit of performance.
1403
:param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
1405
:return: None or a utf8 file id.
1407
if (unicode_or_utf8_string is None
1408
or unicode_or_utf8_string.__class__ == bytes):
1409
return unicode_or_utf8_string
1410
raise TypeError('Unicode file ids are no longer supported. '
1411
'File id generators should be creating utf8 file ids.')
1414
_platform_normalizes_filenames = False
1415
if sys.platform == 'darwin':
1416
_platform_normalizes_filenames = True
1419
def normalizes_filenames():
1420
"""Return True if this platform normalizes unicode filenames.
1424
return _platform_normalizes_filenames
1427
def _accessible_normalized_filename(path):
1428
"""Get the unicode normalized path, and if you can access the file.
1430
On platforms where the system normalizes filenames (Mac OSX),
1431
you can access a file by any path which will normalize correctly.
1432
On platforms where the system does not normalize filenames
1433
(everything else), you have to access a file by its exact path.
1435
Internally, bzr only supports NFC normalization, since that is
1436
the standard for XML documents.
1438
So return the normalized path, and a flag indicating if the file
1439
can be accessed by that path.
1442
if isinstance(path, bytes):
1443
path = path.decode(sys.getfilesystemencoding())
1444
return unicodedata.normalize('NFC', path), True
1447
def _inaccessible_normalized_filename(path):
1448
__doc__ = _accessible_normalized_filename.__doc__
1450
if isinstance(path, bytes):
1451
path = path.decode(sys.getfilesystemencoding())
1452
normalized = unicodedata.normalize('NFC', path)
1453
return normalized, normalized == path
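# Illustrative example (assumed, not from the original source):
#   u'a\u0301' (an 'a' plus a combining acute accent) normalizes to u'\xe1',
#   so _inaccessible_normalized_filename(u'a\u0301') -> (u'\xe1', False),
#   while the already-NFC u'\xe1' -> (u'\xe1', True).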
if _platform_normalizes_filenames:
    normalized_filename = _accessible_normalized_filename
else:
    normalized_filename = _inaccessible_normalized_filename


def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`). May be ignored if the feature is not available on this
        platform or Python version.
    """
    try:
        siginterrupt = signal.siginterrupt
        # This python implementation doesn't provide signal support, hence no
    except AttributeError:
        # siginterrupt doesn't exist on this platform, or for this version
        def siginterrupt(signum, flag): return None

    def sig_handler(*args):
        # Python resets the siginterrupt flag when a signal is
        # received. <http://bugs.python.org/issue8354>
        # As a workaround for some cases, set it back the way we want it.
        siginterrupt(signum, False)
        # Now run the handler function passed to set_signal_handler.

    sig_handler = handler
    old_handler = signal.signal(signum, sig_handler)
    siginterrupt(signum, False)


default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""

# Keep some state so that terminal_width can detect if _terminal_size has
# returned a different size since the process started. See docstring and
# comments of terminal_width for details.
# _terminal_size_state has 3 possible values: no_data, unchanged, and changed.
_terminal_size_state = 'no_data'
_first_terminal_size = None


def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    - if BRZ_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - query the OS, if the queried size has changed since the last query,
    - if COLUMNS is set, returns its value,
    - if the OS has a value (even though it's never changed), return its value.

    From there, we need to query the OS to get the size of the controlling

    On Unices we query the OS by:
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    On Windows we query the OS by:
    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)
    """
    # Note to implementors: if changing the rules for determining the width,
    # make sure you've considered the behaviour in these cases:
    # - M-x shell in emacs, where $COLUMNS is set and TIOCGWINSZ returns 0,0.
    # - brz log | less, in bash, where $COLUMNS not set and TIOCGWINSZ returns
    # - (add more interesting cases here, if you find any)
    # Some programs implement "Use $COLUMNS (if set) until SIGWINCH occurs",
    # but we don't want to register a signal handler because it is impossible
    # to do so without risking EINTR errors in Python <= 2.6.5 (see
    # <http://bugs.python.org/issue8354>). Instead we check TIOCGWINSZ every
    # time so we can notice if the reported size has changed, which should have

    # If BRZ_COLUMNS is set, take it, user is always right
    # Except if they specified 0 in which case, impose no limit here
    try:
        width = int(os.environ['BRZ_COLUMNS'])
    except (KeyError, ValueError):
    if width is not None:

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BRZ_COLUMNS is the recommended way to override.

    width, height = os_size = _terminal_size(None, None)
    global _first_terminal_size, _terminal_size_state
    if _terminal_size_state == 'no_data':
        _first_terminal_size = os_size
        _terminal_size_state = 'unchanged'
    elif (_terminal_size_state == 'unchanged' and
          _first_terminal_size != os_size):
        _terminal_size_state = 'changed'

    # If the OS claims to know how wide the terminal is, and this value has
    # ever changed, use that.
    if _terminal_size_state == 'changed':
        if width is not None and width > 0:

    # If COLUMNS is set, use it.
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):

    # Finally, use an unchanged size from the OS, if we have one.
    if _terminal_size_state == 'unchanged':
        if width is not None and width > 0:

    # The width could not be determined.
def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(
        defaultx=width, defaulty=height)
    return width, height


def _ioctl_terminal_size(width, height):
    try:
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
    return width, height


_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the controlling
terminal. If any error occurs, the provided default values should be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
else:
    _terminal_size = _ioctl_terminal_size


def supports_executable(path):
    """Return if filesystem at path supports executable bit.

    :param path: Path for which to check the file system
    :return: boolean indicating whether executable bit can be stored/relied upon
    """
    if sys.platform == 'win32':
    try:
        fs_type = get_fs_type(path)
    except errors.DependencyNotPresent as e:
        trace.mutter('Unable to get fs type for %r: %s', path, e)
    if fs_type in ('vfat', 'ntfs'):
        # filesystems known to not support executable bit


def supports_symlinks(path):
    """Return if the filesystem at path supports the creation of symbolic links.
    """
    if not has_symlinks():
    try:
        fs_type = get_fs_type(path)
    except errors.DependencyNotPresent as e:
        trace.mutter('Unable to get fs type for %r: %s', path, e)
    if fs_type in ('vfat', 'ntfs'):
        # filesystems known to not support symlinks


def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted. The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"


def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable.

    :param env_variable: The environment variable in question
    :param value: The value to set the environment to. If None, then
        the variable will be removed.
    :return: The original value of the environment variable.
    """
    orig_val = os.environ.get(env_variable)
    if orig_val is not None:
        del os.environ[env_variable]
    os.environ[env_variable] = value


_validWin32PathRE = re.compile(r'^([A-Za-z]:[/\\])?[^:<>*"?\|]*$')


def check_legal_path(path):
    """Check whether the supplied path is legal.
    This is only required on Windows, so we don't test on other platforms
    """
    if sys.platform != "win32":
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)


_WIN32_ERROR_DIRECTORY = 267 # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR or
        (sys.platform == 'win32' and
            (en == _WIN32_ERROR_DIRECTORY or
            and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:
    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
    - directory-relpath is the relative path of the directory being returned
      with respect to top. prefix is prepended to this.
    - directory-path-from-root is the path including top for this directory.
      It is suitable for use with os functions.
    - relpath is the relative path within the subtree being walked.
    - basename is the basename of the path
    - kind is the kind of the file now. If unknown then the file is not
      present within the tree - but it may be recorded as versioned. See
    - lstat is the stat data *if* the file was statted.
    - planned, not implemented:
      path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
    :return: an iterator over the dirs.
    """
    # TODO there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    relroot, _, _, _, top = pending.pop()
    relprefix = relroot + u'/'
    top_slash = top + u'/'

    append = dirblock.append
    try:
        names = sorted(map(decode_filename, _listdir(top)))
    except OSError as e:
        if not _is_error_enotdir(e):
    abspath = top_slash + name
    statvalue = _lstat(abspath)
    kind = _kind_from_mode(statvalue.st_mode)
    append((relprefix + name, name, kind, statvalue, abspath))
    yield (relroot, top), dirblock

    # push the user specified dirs from dirblock
    pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
class DirReader(object):
    """An interface for reading directories."""

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """Converts top and prefix to a starting dir entry

        :param top: A utf8 path
        :param prefix: An optional utf8 path to prefix output relative paths
        :return: A tuple starting with prefix, and ending with the native
        """
        raise NotImplementedError(self.top_prefix_to_starting_dir)

    def read_dir(self, prefix, top):
        """Read a specific dir.

        :param prefix: A utf8 prefix to be prepended to the path basenames.
        :param top: A natively encoded path to read.
        :return: A list of the directory's contents. Each item contains:
            (utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
        """
        raise NotImplementedError(self.read_dir)


_selected_dir_reader = None


def _walkdirs_utf8(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields the same information as walkdirs() only each entry is yielded
    in utf-8. On platforms which have a filesystem encoding of utf8 the paths
    are returned as exact byte-strings.

    :return: yields a tuple of (dir_info, [file_info])
        dir_info is (utf8_relpath, path-from-top)
        file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
        if top is an absolute path, path-from-top is also an absolute path.
        path-from-top might be unicode or utf8, but it is the correct path to
        pass to os functions to affect the file in question. (such as os.lstat)
    """
    global _selected_dir_reader
    if _selected_dir_reader is None:
        if sys.platform == "win32":
            from ._walkdirs_win32 import Win32ReadDir
            _selected_dir_reader = Win32ReadDir()
        elif _fs_enc in ('utf-8', 'ascii'):
            try:
                from ._readdir_pyx import UTF8DirReader
                _selected_dir_reader = UTF8DirReader()
            except ImportError as e:
                failed_to_load_extension(e)

    if _selected_dir_reader is None:
        # Fallback to the python version
        _selected_dir_reader = UnicodeDirReader()

    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    # But we don't actually use 1-3 in pending, so set them to None
    pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
    read_dir = _selected_dir_reader.read_dir
    _directory = _directory_kind
    relroot, _, _, _, top = pending[-1].pop()
    dirblock = sorted(read_dir(relroot, top))
    yield (relroot, top), dirblock
    # push the user specified dirs from dirblock
    next = [d for d in reversed(dirblock) if d[2] == _directory]
    pending.append(next)
class UnicodeDirReader(DirReader):
    """A dir reader for non-utf8 file systems, which transcodes."""

    __slots__ = ['_utf8_encode']

    def __init__(self):
        self._utf8_encode = codecs.getencoder('utf8')

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """See DirReader.top_prefix_to_starting_dir."""
        return (safe_utf8(prefix), None, None, None, safe_unicode(top))

    def read_dir(self, prefix, top):
        """Read a single directory from a non-utf8 file system.

        top, and the abspath element in the output are unicode, all other paths
        are utf8. Local disk IO is done via unicode calls to listdir etc.

        This is currently the fallback code path when the filesystem encoding is
        not UTF-8. It may be better to implement an alternative so that we can
        safely handle paths that are not properly decodable in the current

        See DirReader.read_dir for details.
        """
        _utf8_encode = self._utf8_encode

        def _fs_decode(s): return s.decode(_fs_enc)

        def _fs_encode(s): return s.encode(_fs_enc)

        _listdir = os.listdir
        _kind_from_mode = file_kind_from_stat_mode

        relprefix = prefix + b'/'
        top_slash = top + '/'

        append = dirblock.append
        for name_native in _listdir(top.encode('utf-8')):
            try:
                name = _fs_decode(name_native)
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    relprefix + name_native, _fs_enc)
            name_utf8 = _utf8_encode(name)[0]
            abspath = top_slash + name
            statvalue = _lstat(abspath)
            kind = _kind_from_mode(statvalue.st_mode)
            append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
        return sorted(dirblock)
def copy_tree(from_path, to_path, handlers={}):
    """Copy all of the entries in from_path into to_path.

    :param from_path: The base directory to copy.
    :param to_path: The target directory. If it does not exist, it will
    :param handlers: A dictionary of functions, which take a source and
        destinations for files, directories, etc.
        It is keyed on the file kind, such as 'directory', 'symlink', or 'file'
        'file', 'directory', and 'symlink' should always exist.
        If they are missing, they will be replaced with 'os.mkdir()',
        'os.readlink() + os.symlink()', and 'shutil.copy2()', respectively.
    """
    # Now, just copy the existing cached tree to the new location
    # We use a cheap trick here.
    # Absolute paths are prefixed with the first parameter
    # relative paths are prefixed with the second.
    # So we can get both the source and target returned
    # without any extra work.

    def copy_dir(source, dest):

    def copy_link(source, dest):
        """Copy the contents of a symlink"""
        link_to = os.readlink(source)
        os.symlink(link_to, dest)

    real_handlers = {'file': shutil.copy2,
                     'symlink': copy_link,
                     'directory': copy_dir,
    real_handlers.update(handlers)

    if not os.path.exists(to_path):
        real_handlers['directory'](from_path, to_path)

    for dir_info, entries in walkdirs(from_path, prefix=to_path):
        for relpath, name, kind, st, abspath in entries:
            real_handlers[kind](abspath, relpath)


def copy_ownership_from_path(dst, src=None):
    """Copy usr/grp ownership from src file/dir to dst file/dir.

    If src is None, the containing directory is used as source. If chown
    fails, the error is ignored and a warning is printed.
    """
    chown = getattr(os, 'chown', None)
        src = os.path.dirname(dst)
        chown(dst, s.st_uid, s.st_gid)
        'Unable to copy ownership from "%s" to "%s". '
        'You may want to set it manually.', src, dst)
        trace.log_exception_quietly()


def path_prefix_key(path):
    """Generate a prefix-order path key for path.

    This can be used to sort paths in the same way that walkdirs does.
    """
    return (dirname(path), path)


def compare_paths_prefix_order(path_a, path_b):
    """Compare path_a and path_b to generate the same order walkdirs uses."""
    key_a = path_prefix_key(path_a)
    key_b = path_prefix_key(path_b)
    return (key_a > key_b) - (key_a < key_b)
_cached_user_encoding = None


def get_user_encoding():
    """Find out what the preferred user encoding is.

    This is generally the encoding that is used for command line parameters
    and file contents. This may be different from the terminal encoding
    or the filesystem encoding.

    :return: A string defining the preferred user encoding
    """
    global _cached_user_encoding
    if _cached_user_encoding is not None:
        return _cached_user_encoding

    if os.name == 'posix' and getattr(locale, 'CODESET', None) is not None:
        # Use the existing locale settings and call nl_langinfo directly
        # rather than going through getpreferredencoding. This avoids
        # <http://bugs.python.org/issue6202> on OSX Python 2.6 and the
        # possibility of the setlocale call throwing an error.
        user_encoding = locale.nl_langinfo(locale.CODESET)
    # GZ 2011-12-19: On windows could call GetACP directly instead.
    user_encoding = locale.getpreferredencoding(False)

    user_encoding = codecs.lookup(user_encoding).name
    if user_encoding not in ("", "cp0"):
        sys.stderr.write('brz: warning:'
                         ' unknown encoding %s.'
                         ' Continuing with ascii encoding.\n'
    user_encoding = 'ascii'

    # Get 'ascii' when setlocale has not been called or LANG=C or unset.
    if user_encoding == 'ascii':
        if sys.platform == 'darwin':
            # OSX is special-cased in Python to have a UTF-8 filesystem
            # encoding and previously had LANG set here if not present.
            user_encoding = 'utf-8'
        # GZ 2011-12-19: Maybe UTF-8 should be the default in this case
        # for some other posix platforms as well.

    _cached_user_encoding = user_encoding
    return user_encoding


def get_diff_header_encoding():
    return get_terminal_encoding()


def get_host_name():
    """Return the current unicode host name.

    This is meant to be used in place of socket.gethostname() because that
    behaves inconsistently on different platforms.
    """
    if sys.platform == "win32":
        return win32utils.get_host_name()
    return socket.gethostname()


# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
MAX_SOCKET_CHUNK = 64 * 1024

_end_of_stream_errors = [errno.ECONNRESET, errno.EPIPE, errno.EINVAL]
for _eno in ['WSAECONNRESET', 'WSAECONNABORTED']:
    _eno = getattr(errno, _eno, None)
    if _eno is not None:
        _end_of_stream_errors.append(_eno)


def read_bytes_from_socket(sock, report_activity=None,
                           max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while True:
        try:
            data = sock.recv(max_read_size)
        except socket.error as e:
            eno = e.args[0]
            if eno in _end_of_stream_errors:
                # The connection was closed by the other side.  Callers expect
                # an empty string to signal end-of-stream.
                return b""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(data), 'read')
            return data


def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer.  MSG_WAITALL is not available
    on all platforms, but this should work everywhere.  This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = b''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == b'':
            break  # eof
        b += new
    return b


def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    """
    sent_total = 0
    byte_count = len(bytes)
    view = memoryview(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(view[sent_total:sent_total + MAX_SOCKET_CHUNK])
        except (socket.error, IOError) as e:
            if e.args[0] in _end_of_stream_errors:
                raise errors.ConnectionReset(
                    "Error trying to write to socket", e)
            if e.args[0] != errno.EINTR:
                raise
        else:
            if sent == 0:
                raise errors.ConnectionReset('Sending to %s returned 0 bytes'
                                             % (sock,))
            sent_total += sent
            if report_activity is not None:
                report_activity(sent, 'write')
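

# Hedged usage sketch (editor's addition, not part of the original module):
# a loopback round-trip through send_all() and recv_all().  The helper is
# invented for illustration; socket.socketpair() may not exist on every
# platform this module supports (it does on POSIX, and on Windows only with
# recent Python versions).
def _example_socket_round_trip(payload=b'x' * 1024):
    a, b = socket.socketpair()
    try:
        send_all(a, payload)
        return recv_all(b, len(payload)) == payload
    finally:
        a.close()
        b.close()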


def connect_socket(address):
    # Slight variation of the socket.create_connection() function (provided by
    # python-2.6) that can fail if getaddrinfo returns an empty list. We also
    # provide it for previous python versions. Also, we don't use the timeout
    # parameter (provided by the python implementation) so we don't implement
    # it either).
    err = socket.error('getaddrinfo returns an empty list')
    host, port = address
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.connect(sa)
            return sock

        except socket.error as e:
            err = e
            # 'err' is now the most recent error
            if sock is not None:
                sock.close()
    raise err


def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced.  But the file itself is not
    dereferenced.
    :param path: The original path.  May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)


def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"


def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with breezy are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can delegate to it.
    """
    # Check package name is within breezy
    if package == "breezy":
        resource_relpath = resource_name
    elif package.startswith("breezy."):
        package = package[len("breezy."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in breezy' % package)

    # Map the resource to a file and read its contents
    base = dirname(breezy.__file__)
    if getattr(sys, 'frozen', None):    # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    with open(pathjoin(base, resource_relpath), "rt") as f:
        return f.read()


def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from ._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from ._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)


file_kind_from_stat_mode = file_kind_from_stat_mode_thunk


def file_stat(f, _lstat=os.lstat):
    try:
        return _lstat(f)
    except OSError as e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise


def file_kind(f, _lstat=os.lstat):
    stat_value = file_stat(f, _lstat)
    return file_kind_from_stat_mode(stat_value.st_mode)
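

# Hedged usage sketch (editor's addition, not part of the original module):
# file_kind() maps a path to one of the kind strings used throughout breezy,
# e.g. 'file', 'directory' or 'symlink'.  The helper is invented for
# illustration and only touches a throwaway temporary directory.
def _example_file_kind_of_tempdir():
    d = mkdtemp()
    try:
        return file_kind(d)  # expected: 'directory'
    finally:
        rmtree(d)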


def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur.  This is typically only true for low-level operations
    like os.read.  If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR.  There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal).  So this function can reduce the impact for IO that breezy
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError) as e:
            if e.errno == errno.EINTR:
                continue
            raise
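

# Hedged usage sketch (editor's addition, not part of the original module):
# wrapping an idempotent low-level call such as os.read is the intended use;
# the helper and its parameters are invented for illustration.
def _example_read_retrying_eintr(fd, count=4096):
    return until_no_eintr(os.read, fd, count)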


if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty
        import termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch


if sys.platform.startswith('linux'):
    def _local_concurrency():
        try:
            return os.sysconf('SC_NPROCESSORS_ONLN')
        except (ValueError, OSError, AttributeError):
            return None
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif "bsd" in sys.platform:
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p', ],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        # No platform-specific way to find out; the caller falls back to 1.
        return None


_cached_local_concurrency = None


def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BRZ_CONCURRENCY', None)
    if concurrency is None:
        import multiprocessing
        try:
            concurrency = multiprocessing.cpu_count()
        except NotImplementedError:
            # multiprocessing.cpu_count() isn't implemented on all platforms
            try:
                concurrency = _local_concurrency()
            except (OSError, IOError):
                pass
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
        concurrency = 1
    if use_cache:
        _cached_local_concurrency = concurrency
    return concurrency
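

# Hedged usage sketch (editor's addition, not part of the original module):
# callers typically use local_concurrency() to size worker pools.  The
# ThreadPoolExecutor below is only an illustration of that pattern, not
# something this module provides.
def _example_sized_thread_pool():
    from concurrent.futures import ThreadPoolExecutor
    return ThreadPoolExecutor(max_workers=local_concurrency())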


class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if isinstance(object, str):
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)


if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes.  Deleting or renaming a closed file opened with this
        function is not blocking child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for flags for each modes.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else:  # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)
else:
    open_file = open


def available_backup_name(base, exists):
    """Find a non-existing backup file name.

    This will *not* create anything, this only returns a 'free' entry.  This
    should be used for checking names in a directory below a locked
    tree/branch/repo to avoid race conditions. This is LBYL (Look Before You
    Leap) and generally discouraged.

    :param base: The base name.

    :param exists: A callable returning True if the path parameter exists.
    """
    counter = 1
    name = "%s.~%d~" % (base, counter)
    while exists(name):
        counter += 1
        name = "%s.~%d~" % (base, counter)
    return name
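

# Hedged usage sketch (editor's addition, not part of the original module):
# the `exists` argument is any callable, so a set of already-taken names is
# enough to show how the ".~N~" suffix is chosen.  The names are invented.
def _example_backup_name():
    taken = {'foo.~1~', 'foo.~2~'}
    return available_backup_name('foo', taken.__contains__)  # 'foo.~3~'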


def set_fd_cloexec(fd):
    """Set a Unix file descriptor's FD_CLOEXEC flag.  Do nothing if platform
    support for this is not available.
    """
    try:
        import fcntl
        old = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old | fcntl.FD_CLOEXEC)
    except (ImportError, AttributeError):
        # Either the fcntl module or specific constants are not present
        pass


def find_executable_on_path(name):
    """Finds an executable on the PATH.

    On Windows, this will try to append each extension in the PATHEXT
    environment variable to the name, if it cannot be found with the name
    as given.

    :param name: The base name of the executable.
    :return: The path to the executable found or None.
    """
    if sys.platform == 'win32':
        exts = os.environ.get('PATHEXT', '').split(os.pathsep)
        exts = [ext.lower() for ext in exts]
        base, ext = os.path.splitext(name)
        if ext != '':
            if ext.lower() not in exts:
                return None
            name = base
            exts = [ext.lower()]
    else:
        exts = ['']
    path = os.environ.get('PATH')
    if path is not None:
        path = path.split(os.pathsep)
        for ext in exts:
            for d in path:
                f = os.path.join(d, name) + ext
                if os.access(f, os.X_OK):
                    return f
    if sys.platform == 'win32':
        app_path = win32utils.get_app_path(name)
        if app_path != name:
            return app_path
    return None
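

# Hedged usage sketch (editor's addition, not part of the original module):
# look up an interpreter on PATH; None is returned when nothing matches.
def _example_find_python():
    return (find_executable_on_path('python3')
            or find_executable_on_path('python'))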


def _posix_is_local_pid_dead(pid):
    """True if pid doesn't correspond to live process on this machine"""
    try:
        # Special meaning of unix kill: just check if it's there.
        os.kill(pid, 0)
    except OSError as e:
        if e.errno == errno.ESRCH:
            # On this machine, and really not found: as sure as we can be
            # that it's dead.
            return True
        elif e.errno == errno.EPERM:
            # exists, though not ours
            return False
        else:
            trace.mutter("os.kill(%d, 0) failed: %s" % (pid, e))
            # Don't really know.
            return False
    else:
        # Exists and our process: not dead.
        return False


if sys.platform == "win32":
    is_local_pid_dead = win32utils.is_local_pid_dead
else:
    is_local_pid_dead = _posix_is_local_pid_dead


_maybe_ignored = ['EAGAIN', 'EINTR', 'ENOTSUP', 'EOPNOTSUPP', 'EACCES']
_fdatasync_ignored = [getattr(errno, name) for name in _maybe_ignored
                      if getattr(errno, name, None) is not None]


def fdatasync(fileno):
    """Flush file contents to disk if possible.

    :param fileno: Integer OS file handle.
    :raises TransportNotPossible: If flushing to disk is not possible.
    """
    fn = getattr(os, 'fdatasync', getattr(os, 'fsync', None))
    if fn is not None:
        try:
            fn(fileno)
        except IOError as e:
            # See bug #1075108, on some platforms fdatasync exists, but can
            # raise ENOTSUP. However, we are calling fdatasync to be helpful
            # and reduce the chance of corruption-on-powerloss situations. It
            # is not a mandatory call, so it is ok to suppress failures.
            trace.mutter("ignoring error calling fdatasync: %s" % (e,))
            if getattr(e, 'errno', None) not in _fdatasync_ignored:
                raise


def ensure_empty_directory_exists(path, exception_class):
    """Make sure a local directory exists and is empty.

    If it does not exist, it is created.  If it exists and is not empty, an
    instance of exception_class is raised.
    """
    try:
        os.mkdir(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        if os.listdir(path) != []:
            raise exception_class(path)
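

# Hedged usage sketch (editor's addition, not part of the original module):
# any exception type that accepts the path as its sole argument can be used
# as exception_class.  The helper and its default path are invented, and the
# call creates the directory as a side effect.
def _example_ensure_scratch_dir(path='scratch-dir'):
    ensure_empty_directory_exists(path, RuntimeError)
    return path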


def is_environment_error(evalue):
    """True if exception instance is due to a process environment issue

    This includes OSError and IOError, but also other errors that come from
    the operating system or core libraries but are not subclasses of those.
    """
    if isinstance(evalue, (EnvironmentError, select.error)):
        return True
    if sys.platform == "win32" and win32utils._is_pywintypes_error(evalue):
        return True
    return False


def read_mtab(path):
    """Read an fstab-style file and extract mountpoint+filesystem information.

    :param path: Path to read from
    :yield: Tuples with mountpoints (as bytestrings) and filesystem names
    """
    with open(path, 'rb') as f:
        for line in f:
            if line.startswith(b'#'):
                continue
            cols = line.split()
            if len(cols) < 3:
                continue
            yield cols[1], cols[2].decode('ascii', 'replace')


MTAB_PATH = '/etc/mtab'


class FilesystemFinder(object):
    """Find the filesystem for a particular path."""

    def __init__(self, mountpoints):
        def key(x):
            return len(x[0])
        self._mountpoints = sorted(mountpoints, key=key, reverse=True)

    @classmethod
    def from_mtab(cls):
        """Create a FilesystemFinder from an mtab-style file.

        Note that this will silently ignore mtab if it doesn't exist or can
        not be opened.
        """
        # TODO(jelmer): Use inotify to be notified when /etc/mtab changes and
        # we need to re-read it.
        try:
            return cls(read_mtab(MTAB_PATH))
        except EnvironmentError as e:
            trace.mutter('Unable to read mtab: %s', e)
            return cls([])

    def find(self, path):
        """Find the filesystem used by a particular path.

        :param path: Path to find (bytestring or text type)
        :return: Filesystem name (as text type) or None, if the filesystem is
            unknown.
        """
        for mountpoint, filesystem in self._mountpoints:
            if is_inside(mountpoint, path):
                return filesystem
        return None
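

# Hedged usage sketch (editor's addition, not part of the original module):
# FilesystemFinder can be fed mountpoint/filesystem pairs directly, which is
# handy in tests where reading /etc/mtab would be unreliable.  The table
# below is invented.
def _example_filesystem_finder():
    finder = FilesystemFinder([(b'/', 'ext4'), (b'/boot', 'vfat')])
    return finder.find(b'/boot/grub')  # 'vfat'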


_FILESYSTEM_FINDER = None


def get_fs_type(path):
    """Return the filesystem type for the partition a path is in.

    :param path: Path to search filesystem type for
    :return: A FS type, as string. E.g. "ext2"
    """
    global _FILESYSTEM_FINDER
    if _FILESYSTEM_FINDER is None:
        _FILESYSTEM_FINDER = FilesystemFinder.from_mtab()

    if not isinstance(path, bytes):
        path = path.encode(_fs_enc)

    return _FILESYSTEM_FINDER.find(path)
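

# Hedged usage sketch (editor's addition, not part of the original module):
# get_fs_type() accepts text or bytes; a common use is special-casing
# behaviour on network filesystems such as NFS.
def _example_fs_type_of_cwd():
    return get_fs_type(os.getcwd())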


perf_counter = time.perf_counter