# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from .lazy_import import lazy_import
lazy_import(globals(), """
from datetime import datetime

# We need to import both shutil and rmtree as we export the latter on posix
# and need the former on windows
from shutil import rmtree

# We need to import both tempfile and mkdtemp as we export the latter on posix
# and need the former on windows
from tempfile import mkdtemp

from breezy.i18n import gettext
""")

# Cross platform wall-clock time functionality with decent resolution.
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
# synchronized with ``time.time()``; it is only meant to be used to find
# delta times by subtracting from another call to this function.
timer_func = time.time
if sys.platform == 'win32':
    timer_func = time.clock
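
# Illustrative sketch (not part of the original module): timer_func is only
# meaningful for measuring deltas, e.g. in a hypothetical helper like this:
def _example_timer_delta(work):
    """Time a callable using the platform-appropriate wall clock."""
    start = timer_func()
    work()
    return timer_func() - start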


# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
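
# Illustrative sketch (not part of the original module): the flags above are
# meant to be OR'ed into os.open() calls so the same call works on win32 and
# posix; sha_file_by_name() below uses exactly this pattern.
def _example_open_binary(fname):
    """Open fname read-only in binary mode without handle inheritance."""
    return os.open(fname, os.O_RDONLY | O_BINARY | O_NOINHERIT)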


class UnsupportedTimezoneFormat(errors.BzrError):

    _fmt = ('Unsupported timezone format "%(timezone)s", '
            'options are "utc", "original", "local".')

    def __init__(self, timezone):
        self.timezone = timezone


def get_unicode_argv():
    try:
        user_encoding = get_user_encoding()
        return [a.decode(user_encoding) for a in sys.argv[1:]]
    except UnicodeDecodeError:
        raise errors.BzrError(gettext("Parameter {0!r} encoding is unsupported by {1} "
                                      "application locale.").format(a, user_encoding))


def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        chmod_if_possible(filename, mod)


def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        chmod_if_possible(filename, mod)


def chmod_if_possible(filename, mode):
    # Set file mode if that can be safely done.
    # Sometimes even on unix the filesystem won't allow it - see
    # https://bugs.launchpad.net/bzr/+bug/606537
    try:
        # It is probably faster to just do the chmod, rather than
        # doing a stat, and then trying to compare
        os.chmod(filename, mode)
    except (IOError, OSError) as e:
        # Permission/access denied seems to commonly happen on smbfs; there's
        # probably no point warning about it.
        # <https://bugs.launchpad.net/bzr/+bug/606537>
        if getattr(e, 'errno') in (errno.EPERM, errno.EACCES):
            trace.mutter("ignore error on chmod of %r: %r" % (
                filename, e))


def minimum_path_selection(paths):
    """Return the smallest subset of paths which covers everything in paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.
    """

    def sort_key(path):
        if isinstance(path, bytes):
            return path.split(b'/')
        return path.split('/')

    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
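
# Illustrative sketch (not part of the original module): nested paths collapse
# onto their outermost ancestors, e.g.
#
#   minimum_path_selection({'a/b/c', 'a', 'other'}) == {'a', 'other'}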


_QUOTE_RE = None


def quotefn(f):
    """Return a quoted filename

    This previously used backslash quoting, but that works poorly on
    Windows."""
    # TODO: I'm not really sure this is the best format either.
    global _QUOTE_RE
    if _QUOTE_RE is None:
        _QUOTE_RE = re.compile(r'([^a-zA-Z0-9.,:/\\_~-])')

    if _QUOTE_RE.search(f):
        return '"' + f + '"'
    return f


_directory_kind = 'directory'


def get_umask():
    """Return the current umask"""
    # Assume that people aren't messing with the umask while running
    # XXX: This is not thread safe, but there is no way to get the
    #      umask without setting it


_kind_marker_map = {
    _directory_kind: "/",
    'tree-reference': '+',
}


def kind_marker(kind):
    try:
        return _kind_marker_map[kind]
    except KeyError:
        # Slightly faster than using .get(, '') when the common case is that
        # kind will be found
        return ''


lexists = getattr(os.path, 'lexists', None)
if lexists is None:
    def lexists(f):
        try:
            stat = getattr(os, 'lstat', os.stat)
            stat(f)
            return True
        except OSError as e:
            if e.errno == errno.ENOENT:
                return False
            raise errors.BzrError(
                gettext("lstat/stat of ({0!r}): {1!r}").format(f, e))


def fancy_rename(old, new, rename_func, unlink_func):
    """A fancy rename, when you don't have atomic rename.

    :param old: The old path, to rename from
    :param new: The new path, to rename to
    :param rename_func: The potentially non-atomic rename function
    :param unlink_func: A way to delete the target file if the full rename
        succeeds
    """
    # sftp rename doesn't allow overwriting, so play tricks:
    base = os.path.basename(new)
    dirname = os.path.dirname(new)
    # callers use different encodings for the paths so the following MUST
    # respect that. We rely on python upcasting to unicode if new is unicode
    # and keeping a str if not.
    tmp_name = 'tmp.%s.%.9f.%d.%s' % (base, time.time(),
                                      os.getpid(), rand_chars(10))
    tmp_name = pathjoin(dirname, tmp_name)

    # Rename the file out of the way, but keep track if it didn't exist
    # We don't want to grab just any exception
    # something like EACCES should prevent us from continuing
    # The downside is that the rename_func has to throw an exception
    # with an errno = ENOENT, or NoSuchFile
    file_existed = False
    try:
        rename_func(new, tmp_name)
    except (errors.NoSuchFile,) as e:
        pass
    except IOError as e:
        # RBC 20060103 abstraction leakage: the paramiko SFTP clients' rename
        # function raises an IOError with errno set to None when a rename
        # fails. This then gets caught here.
        if e.errno not in (None, errno.ENOENT, errno.ENOTDIR):
            raise
    except Exception as e:
        if (getattr(e, 'errno', None) is None
                or e.errno not in (errno.ENOENT, errno.ENOTDIR)):
            raise
    else:
        file_existed = True

    success = False
    try:
        # This may throw an exception, in which case success will
        # not be set.
        rename_func(old, new)
        success = True
    except (IOError, OSError) as e:
        # source and target may be aliases of each other (e.g. on a
        # case-insensitive filesystem), so we may have accidentally renamed
        # source when we tried to rename target
        if (file_existed and e.errno in (None, errno.ENOENT)
                and old.lower() == new.lower()):
            # source and target are the same file on a case-insensitive
            # filesystem, so we don't generate an exception
            pass
        else:
            raise
    finally:
        if file_existed:
            # If the file used to exist, rename it back into place
            # otherwise just delete it from the tmp location
            if success:
                unlink_func(tmp_name)
            else:
                rename_func(tmp_name, new)
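
# Illustrative sketch (not part of the original module): fancy_rename is the
# fallback for transports whose rename cannot atomically overwrite, e.g.
#
#   fancy_rename('a.tmp', 'a', rename_func=os.rename, unlink_func=os.unlink)
#
# which is exactly how _win32_rename() below invokes it.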


# In Python 2.4.2 and older, os.path.abspath and os.path.realpath
# choke on a Unicode string containing a relative path if
# os.getcwd() returns a non-sys.getdefaultencoding()-encoded
# string.
def _posix_abspath(path):
    # jam 20060426 rather than encoding to fsencoding
    # copy posixpath.abspath, but use os.getcwdu instead
    if not posixpath.isabs(path):
        path = posixpath.join(getcwd(), path)
    return _posix_normpath(path)


def _posix_realpath(path):
    return posixpath.realpath(path.encode(_fs_enc)).decode(_fs_enc)


def _posix_normpath(path):
    path = posixpath.normpath(path)
    # Bug 861008: posixpath.normpath() returns a path normalized according to
    # the POSIX standard, which stipulates (for compatibility reasons) that two
    # leading slashes must not be simplified to one, and only if there are 3 or
    # more should they be simplified as one. So we treat the leading 2 slashes
    # as a special case here by simply removing the first slash, as we consider
    # that breaking POSIX compatibility for this obscure feature is acceptable.
    # This is not a paranoid precaution, as we notably get paths like this when
    # the repo is hosted at the root of the filesystem, i.e. in "/".
    if path.startswith('//'):
        path = path[1:]
    return path


def _posix_path_from_environ(key):
    """Get unicode path from `key` in environment or None if not present

    Note that posix systems use arbitrary byte strings for filesystem objects,
    so a path that raises BadFilenameEncoding here may still be accessible.
    """
    val = os.environ.get(key, None)
    if PY3 or val is None:
        return val
    try:
        return val.decode(_fs_enc)
    except UnicodeDecodeError:
        # GZ 2011-12-12: Ideally want to include `key` in the exception message
        raise errors.BadFilenameEncoding(val, _fs_enc)


def _posix_get_home_dir():
    """Get the home directory of the current user as a unicode path"""
    path = posixpath.expanduser("~")
    try:
        return path.decode(_fs_enc)
    except AttributeError:
        return path
    except UnicodeDecodeError:
        raise errors.BadFilenameEncoding(path, _fs_enc)


def _posix_getuser_unicode():
    """Get username from environment or password database as unicode"""
    name = getpass.getuser()
    user_encoding = get_user_encoding()
    try:
        return name.decode(user_encoding)
    except UnicodeDecodeError:
        raise errors.BzrError("Encoding of username %r is unsupported by %s "
                              "application locale." % (name, user_encoding))


def _win32_fixdrive(path):
    """Force drive letters to be consistent.

    win32 is inconsistent whether it returns lower or upper case
    and even if it was consistent the user might type the other
    so we force it to uppercase
    running python.exe under cmd.exe returns capital C:\\
    running win32 python inside a cygwin shell returns lowercase c:\\
    """
    drive, path = ntpath.splitdrive(path)
    return drive.upper() + path


def _win32_abspath(path):
    # Real ntpath.abspath doesn't have a problem with a unicode cwd
    return _win32_fixdrive(ntpath.abspath(path).replace('\\', '/'))


def _win32_realpath(path):
    # Real ntpath.realpath doesn't have a problem with a unicode cwd
    return _win32_fixdrive(ntpath.realpath(path).replace('\\', '/'))


def _win32_pathjoin(*args):
    return ntpath.join(*args).replace('\\', '/')


def _win32_normpath(path):
    return _win32_fixdrive(ntpath.normpath(path).replace('\\', '/'))


def _win32_getcwd():
    return _win32_fixdrive(_getcwd().replace('\\', '/'))


def _win32_mkdtemp(*args, **kwargs):
    return _win32_fixdrive(tempfile.mkdtemp(*args, **kwargs).replace('\\', '/'))


def _win32_rename(old, new):
    """We expect to be able to atomically replace 'new' with old.

    On win32, if new exists, it must be moved out of the way first,
    and then deleted.
    """
    try:
        fancy_rename(old, new, rename_func=os.rename, unlink_func=os.unlink)
    except OSError as e:
        if e.errno in (errno.EPERM, errno.EACCES, errno.EBUSY, errno.EINVAL):
            # If we try to rename a non-existent file onto cwd, we get
            # EPERM or EACCES instead of ENOENT; this will raise ENOENT
            # if the old path doesn't exist. Sometimes we get EACCES,
            # on Linux we seem to get EBUSY, and on Mac we get EINVAL.
            os.lstat(old)
        raise


def _mac_getcwd():
    return unicodedata.normalize('NFC', _getcwd())


def _rename_wrap_exception(rename_func):
    """Adds extra information to any exceptions that come from rename().

    The exception has an updated message and 'old_filename' and 'new_filename'
    attributes.
    """

    def _rename_wrapper(old, new):
        try:
            rename_func(old, new)
        except OSError as e:
            detailed_error = OSError(e.errno, e.strerror +
                                     " [occurred when renaming '%s' to '%s']" %
                                     (old, new))
            detailed_error.old_filename = old
            detailed_error.new_filename = new
            raise detailed_error

    return _rename_wrapper


if sys.version_info > (3,):
    getcwd = os.getcwd

# Default rename wraps os.rename()
rename = _rename_wrap_exception(os.rename)

# Default is to just use the python builtins, but these can be rebound on
# particular platforms.
abspath = _posix_abspath
realpath = _posix_realpath
pathjoin = os.path.join
normpath = _posix_normpath
path_from_environ = _posix_path_from_environ
_get_home_dir = _posix_get_home_dir
getuser_unicode = _posix_getuser_unicode
dirname = os.path.dirname
basename = os.path.basename
split = os.path.split
splitext = os.path.splitext
# These were already lazily imported into local scope
# mkdtemp = tempfile.mkdtemp
# rmtree = shutil.rmtree

MIN_ABS_PATHLENGTH = 1

if sys.platform == 'win32':
    abspath = _win32_abspath
    realpath = _win32_realpath
    pathjoin = _win32_pathjoin
    normpath = _win32_normpath
    getcwd = _win32_getcwd
    mkdtemp = _win32_mkdtemp
    rename = _rename_wrap_exception(_win32_rename)
    try:
        from . import _walkdirs_win32
    except ImportError:
        pass
    else:
        lstat = _walkdirs_win32.lstat
        fstat = _walkdirs_win32.fstat
        wrap_stat = _walkdirs_win32.wrap_stat

    MIN_ABS_PATHLENGTH = 3

    def _win32_delete_readonly(function, path, excinfo):
        """Error handler for shutil.rmtree function [for win32]

        Helps to remove files and dirs marked as read-only.
        """
        exception = excinfo[1]
        if function in (os.remove, os.rmdir) \
                and isinstance(exception, OSError) \
                and exception.errno == errno.EACCES:
            make_writable(path)
            function(path)
        else:
            raise

    def rmtree(path, ignore_errors=False, onerror=_win32_delete_readonly):
        """Replacer for shutil.rmtree: could remove readonly dirs/files"""
        return shutil.rmtree(path, ignore_errors, onerror)

    f = win32utils.get_unicode_argv     # special function or None
    if f is not None:
        get_unicode_argv = f
    path_from_environ = win32utils.get_environ_unicode
    _get_home_dir = win32utils.get_home_location
    getuser_unicode = win32utils.get_user_name

elif sys.platform == 'darwin':
    getcwd = _mac_getcwd


def get_terminal_encoding(trace=False):
    """Find the best encoding for printing to the screen.

    This attempts to check both sys.stdout and sys.stdin to see
    what encoding they are in, and if that fails it falls back to
    osutils.get_user_encoding().
    The problem is that on Windows, locale.getpreferredencoding()
    is not the same encoding as that used by the console:
    http://mail.python.org/pipermail/python-list/2003-May/162357.html

    On my standard US Windows XP, the preferred encoding is
    cp1252, but the console is cp437

    :param trace: If True trace the selected encoding via mutter().
    """
    from .trace import mutter
    output_encoding = getattr(sys.stdout, 'encoding', None)
    if not output_encoding:
        input_encoding = getattr(sys.stdin, 'encoding', None)
        if not input_encoding:
            output_encoding = get_user_encoding()
            if trace:
                mutter('encoding stdout as osutils.get_user_encoding() %r',
                       output_encoding)
        else:
            output_encoding = input_encoding
            if trace:
                mutter('encoding stdout as sys.stdin encoding %r',
                       output_encoding)
    elif trace:
        mutter('encoding stdout as sys.stdout encoding %r', output_encoding)
    if output_encoding == 'cp0':
        # invalid encoding (cp0 means 'no codepage' on Windows)
        output_encoding = get_user_encoding()
        if trace:
            mutter('cp0 is invalid encoding.'
                   ' encoding stdout as osutils.get_user_encoding() %r',
                   output_encoding)
    # check encoding
    try:
        codecs.lookup(output_encoding)
    except LookupError:
        sys.stderr.write('brz: warning:'
                         ' unknown terminal encoding %s.\n'
                         '  Using encoding %s instead.\n'
                         % (output_encoding, get_user_encoding())
                         )
        output_encoding = get_user_encoding()

    return output_encoding


def normalizepath(f):
    if getattr(os.path, 'realpath', None) is not None:
        F = realpath
    else:
        F = abspath
    [p, e] = os.path.split(f)
    if e == "" or e == "." or e == "..":
        return F(f)
    # only want to strip a trailing slash!
    return pathjoin(F(p), e)


def isdir(f):
    """True if f is an accessible directory."""
    try:
        return stat.S_ISDIR(os.lstat(f)[stat.ST_MODE])
    except OSError:
        return False


def isfile(f):
    """True if f is a regular file."""
    try:
        return stat.S_ISREG(os.lstat(f)[stat.ST_MODE])
    except OSError:
        return False


def islink(f):
    """True if f is a symlink."""
    try:
        return stat.S_ISLNK(os.lstat(f)[stat.ST_MODE])
    except OSError:
        return False


def is_inside(dir, fname):
    """True if fname is inside dir.

    The parameters should typically be passed to osutils.normpath first, so
    that . and .. and repeated slashes are eliminated, and the separators
    are canonical for the platform.

    The empty string as a dir name is taken as top-of-tree and matches
    everything.
    """
    # XXX: Most callers of this can actually do something smarter by
    # looking at the inventory
    if dir == fname:
        return True

    if dir in ('', b''):
        return True

    if isinstance(dir, bytes):
        if not dir.endswith(b'/'):
            dir += b'/'
    else:
        if not dir.endswith('/'):
            dir += '/'

    return fname.startswith(dir)


def is_inside_any(dir_list, fname):
    """True if fname is inside any of given dirs."""
    for dirname in dir_list:
        if is_inside(dirname, fname):
            return True
    return False


def is_inside_or_parent_of_any(dir_list, fname):
    """True if fname is a child or a parent of any of the given files."""
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):
            return True
    return False
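
# Illustrative sketch (not part of the original module): the containment
# helpers operate on normalized, '/'-separated paths, e.g.
#
#   is_inside('src', 'src/foo.c')           -> True
#   is_inside('src', 'srccontrol')          -> False
#   is_inside_any(['src', 'doc'], 'doc/a')  -> True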


def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.
    """
    length = 0
    if read_length >= 0:
        # read specified number of bytes

        while read_length > 0:
            num_bytes_to_read = min(read_length, buff_size)

            block = from_file.read(num_bytes_to_read)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)

            actual_bytes_read = len(block)
            read_length -= actual_bytes_read
            length += actual_bytes_read
    else:
        # read to EOF
        while True:
            block = from_file.read(buff_size)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)
            length += len(block)
    return length
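
# Illustrative sketch (not part of the original module): pumpfile copies
# between file-like objects, here in the default 32k chunks:
def _example_pump(src_path, dst_path):
    """Copy src_path to dst_path and return the number of bytes copied."""
    with open(src_path, 'rb') as src, open(dst_path, 'wb') as dst:
        return pumpfile(src, dst)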


def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.
    """
    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
    # drives).
    if not segment_size:
        segment_size = 5242880  # 5MB
    offsets = range(0, len(bytes), segment_size)
    view = memoryview(bytes)
    write = file_handle.write
    for offset in offsets:
        write(view[offset:offset + segment_size])


def file_iterator(input_file, readsize=32768):
    while True:
        b = input_file.read(readsize)
        if not b:
            break
        yield b


# GZ 2017-09-16: Makes sense in general for hexdigest() result to be text, but
# used as bytes through most interfaces so encode with this wrapper.
if PY3:
    def _hexdigest(hashobj):
        return hashobj.hexdigest().encode()
else:
    def _hexdigest(hashobj):
        return hashobj.hexdigest()


def sha_file(f):
    """Calculate the hexdigest of an open file.

    The file cursor should be already at the start.
    """


def size_sha_file(f):
    """Calculate the size and hexdigest of an open file.

    The file cursor should be already at the start and
    the caller is responsible for closing the file afterwards.
    """
    return size, _hexdigest(s)


def sha_file_by_name(fname):
    """Calculate the SHA1 of a file by reading the full text"""
    f = os.open(fname, os.O_RDONLY | O_BINARY | O_NOINHERIT)
    b = os.read(f, 1 << 16)


def sha_strings(strings, _factory=sha):
    """Return the sha-1 of concatenation of strings"""
    s = _factory()
    for string in strings:
        s.update(string)
    return _hexdigest(s)


def sha_string(f, _factory=sha):
    # GZ 2017-09-16: Dodgy if factory is ever not sha, probably shouldn't be.
    return _hexdigest(_factory(f))


def fingerprint_file(f):
    b = f.read()
    return {'size': len(b),
            'sha1': _hexdigest(sha(b))}


def compare_files(a, b):
    """Returns true if equal in contents"""


def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    if t is None:
        t = time.time()
    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds


weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]


def format_date(t, offset=0, timezone='original', date_fmt=None,
                show_offset=True):
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str
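
# Illustrative sketch (not part of the original module): typical output for
# UTC with the default format and offset display:
#
#   format_date(0, timezone='utc')  ->  'Thu 1970-01-01 00:00:00 +0000'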


# Cache of formatted offset strings
_offset_cache = {}


def format_date_with_offset_in_original_timezone(t, offset=0,
                                                 _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    """
    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str


def format_local_date(t, offset=0, timezone='original', date_fmt=None,
                      show_offset=True):
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, text_type):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str


def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
        tt = time.gmtime(t)
        offset = 0
    elif timezone == 'original':
        tt = time.gmtime(t + offset)
    elif timezone == 'local':
        tt = time.localtime(t)
        offset = local_time_offset(t)
    else:
        raise UnsupportedTimezoneFormat(timezone)
    if date_fmt is None:
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
    if show_offset:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    else:
        offset_str = ''
    return (date_fmt, tt, offset_str)


def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))


def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution
    """
    delta = int(delta)
    if delta >= 0:
        direction = 'ago'
    else:
        direction = 'in the future'
        delta = -delta

    seconds = delta
    if seconds < 90:  # print seconds up to 90 seconds
        if seconds == 1:
            return '%d second %s' % (seconds, direction,)
        return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if seconds == 1:
        plural_seconds = ''
    else:
        plural_seconds = 's'
    if minutes < 90:  # print minutes, seconds up to 90 minutes
        if minutes == 1:
            return '%d minute, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)
        return '%d minutes, %d second%s %s' % (
            minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
    if minutes == 1:
        plural_minutes = ''
    else:
        plural_minutes = 's'
    if hours == 1:
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
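
# Illustrative sketch (not part of the original module): sample renderings of
# format_delta:
#
#   format_delta(61)   ->  '61 seconds ago'
#   format_delta(95)   ->  '1 minute, 35 seconds ago'
#   format_delta(-10)  ->  '10 seconds in the future'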


def filesize(f):
    """Return size of given open file."""
    return os.fstat(f.fileno())[stat.ST_SIZE]


# Alias os.urandom to support platforms (which?) without /dev/urandom and
# override if it doesn't work. Avoid checking on windows where there is
# significant initialisation cost that can be avoided for some bzr calls.

rand_bytes = os.urandom

if rand_bytes.__module__ != "nt":
    try:
        rand_bytes(1)
    except NotImplementedError:
        # not well seeded, but better than nothing
        def rand_bytes(n):
            import random
            s = ''
            while n:
                s += chr(random.randint(0, 255))
                n -= 1
            return s


ALNUM = '0123456789abcdefghijklmnopqrstuvwxyz'


def rand_chars(num):
    """Return a random string of num alphanumeric characters

    The result only contains lowercase chars because it may be used on
    case-insensitive filesystems.
    """
    s = ''
    for raw_byte in rand_bytes(num):
        if not PY3:
            s += ALNUM[ord(raw_byte) % 36]
        else:
            s += ALNUM[raw_byte % 36]
    return s
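
# Illustrative sketch (not part of the original module): rand_chars is what
# gives fancy_rename its unique temporary file names, e.g. a call like
# rand_chars(10) yields ten lowercase alphanumerics such as 'd41c8f0b2e'.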


## TODO: We could later have path objects that remember their list
## decomposition (might be too tricksy though.)

def splitpath(p):
    """Turn string into list of parts."""
    # split on either delimiter because people might use either on
    # windows
    if isinstance(p, bytes):
        ps = re.split(b'[\\\\/]', p)
    else:
        ps = re.split(r'[\\/]', p)

    rs = []
    for f in ps:
        if f in ('..', b'..'):
            raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)
        elif f in ('.', '', b'.', b''):
            pass
        else:
            rs.append(f)
    return rs


def joinpath(p):
    for f in p:
        if (f == '..') or (f is None) or (f == ''):
            raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)
    return pathjoin(*p)


def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].
    """
    parents = []
    parts = splitpath(dirname(filename))
    while parts:
        parents.append(joinpath(parts))
        parts.pop()
    return parents


_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension.  If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> try:
    >>>     import breezy._fictional_extension_pyx
    >>> except ImportError as e:
    >>>     breezy.osutils.failed_to_load_extension(e)
    >>>     import breezy._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --
    # lazy_import.

    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    # with 10 warnings.
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)


def report_extension_load_failures():
    if not _extension_load_failures:
        return
    if config.GlobalConfig().suppress_warning('missing_extensions'):
        return
    # the warnings framework should by default show this only once
    from .trace import warning
    warning(
        "brz: warning: some compiled extensions could not be loaded; "
        "see ``brz help missing-extensions``")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529


try:
    from ._chunks_to_lines_pyx import chunks_to_lines
except ImportError as e:
    failed_to_load_extension(e)
    from ._chunks_to_lines_py import chunks_to_lines


def split_lines(s):
    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, bytes):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
    return _split_lines(s)


def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    nl = b'\n' if isinstance(s, bytes) else u'\n'
    lines = s.split(nl)
    result = [line + nl for line in lines[:-1]]
    if lines[-1]:
        result.append(lines[-1])
    return result


def hardlinks_good():
    return sys.platform not in ('win32', 'cygwin', 'darwin')


def link_or_copy(src, dest):
    """Hardlink a file, or copy it if it can't be hardlinked."""
    if not hardlinks_good():
        shutil.copyfile(src, dest)
        return
    try:
        os.link(src, dest)
    except (OSError, IOError) as e:
        if e.errno != errno.EXDEV:
            raise
        shutil.copyfile(src, dest)


def delete_any(path):
    """Delete a file, symlink or directory.

    Will delete even if readonly.
    """
    try:
        _delete_file_or_dir(path)
    except (OSError, IOError) as e:
        if e.errno in (errno.EPERM, errno.EACCES):
            # make writable and try again
            try:
                make_writable(path)
            except (OSError, IOError):
                pass
            _delete_file_or_dir(path)
        else:
            raise


def _delete_file_or_dir(path):
    # Look Before You Leap (LBYL) is appropriate here instead of Easier to Ask
    # for Forgiveness than Permission (EAFP) because:
    # - root can damage a solaris file system by using unlink,
    # - unlink raises different exceptions on different OSes (linux: EISDIR,
    #   win32: EACCES, OSX: EPERM) when invoked on a directory.
    if isdir(path):  # Takes care of symlinks
        os.rmdir(path)
    else:
        os.unlink(path)


def has_symlinks():
    if getattr(os, 'symlink', None) is not None:
        return True
    return False


def has_hardlinks():
    if getattr(os, 'link', None) is not None:
        return True
    return False


def host_os_dereferences_symlinks():
    return (has_symlinks()
            and sys.platform not in ('cygwin', 'win32'))


def readlink(abspath):
    """Return a string representing the path to which the symbolic link points.

    :param abspath: The link absolute unicode path.

    This is guaranteed to return the symbolic link in unicode in all python
    versions.
    """
    link = abspath.encode(_fs_enc)
    target = os.readlink(link)
    target = target.decode(_fs_enc)
    return target


def contains_whitespace(s):
    """True if there are any whitespace characters in s."""
    # string.whitespace can include '\xa0' in certain locales, because it is
    # considered "non-breaking-space" as part of ISO-8859-1. But it
    # 1) Isn't a breaking whitespace
    # 2) Isn't one of ' \t\r\n' which are characters we sometimes use as
    #    "whitespace"
    # 3) '\xa0' isn't unicode safe since it is >128.
    if isinstance(s, str):
        ws = ' \t\n\r\v\f'
    else:
        ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')
    for ch in ws:
        if ch in s:
            return True
    return False


def contains_linebreaks(s):
    """True if there is any vertical whitespace in s."""
    for ch in '\f\n\r':
        if ch in s:
            return True
    return False


def relpath(base, path):
    """Return path relative to base, or raise PathNotChild exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem.

    NOTE: `base` should not have a trailing slash otherwise you'll get
    PathNotChild exceptions regardless of `path`.
    """
    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError(gettext('%r is too short to calculate a relative path')
                         % (base,))

    rp = abspath(path)

    s = []
    head = rp
    while True:
        if len(head) <= len(base) and head != base:
            raise errors.PathNotChild(rp, base)
        if head == base:
            break
        head, tail = split(head)
        if tail:
            s.append(tail)

    return pathjoin(*reversed(s))
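
# Illustrative sketch (not part of the original module): relpath strips a known
# parent directory and raises PathNotChild otherwise, e.g.
#
#   relpath('/home/user/branch', '/home/user/branch/doc/a.txt') -> 'doc/a.txt'
#   relpath('/home/user/branch', '/etc/passwd') -> raises errors.PathNotChild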


def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in
    # the meantime.
    rel = relpath(base, path)
    # '.' will have been turned into ''
    if not rel:
        return rel

    abs_base = abspath(base)
    current = abs_base
    _listdir = os.listdir

    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
        lbit = bit.lower()
        try:
            next_entries = _listdir(current)
        except OSError:  # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            # remaining bits.
            current = pathjoin(current, bit, *list(bit_iter))
            break
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
                break
        else:
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
            break
    return current[len(abs_base):].lstrip('/')


# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there.  For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
else:
    canonical_relpath = relpath


def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]


def decode_filename(filename):
    """Decode the filename using the filesystem encoding

    If it is unicode, it is returned.
    Otherwise it is decoded from the filesystem's encoding. If decoding fails,
    an errors.BadFilenameEncoding exception is raised.
    """
    if isinstance(filename, text_type):
        return filename
    try:
        return filename.decode(_fs_enc)
    except UnicodeDecodeError:
        raise errors.BadFilenameEncoding(filename, _fs_enc)


def safe_unicode(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string into unicode.

    If it is unicode, it is returned.
    Otherwise it is decoded from utf-8. If decoding fails, the exception is
    wrapped in a BzrBadParameterNotUnicode exception.
    """
    if isinstance(unicode_or_utf8_string, text_type):
        return unicode_or_utf8_string
    try:
        return unicode_or_utf8_string.decode('utf8')
    except UnicodeDecodeError:
        raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)


def safe_utf8(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string to a utf8 string.

    If it is a str, it is returned.
    If it is Unicode, it is encoded into a utf-8 string.
    """
    if isinstance(unicode_or_utf8_string, bytes):
        # TODO: jam 20070209 This is overkill, and probably has an impact on
        #       performance if we are dealing with lots of apis that want a
        #       utf-8 revision id
        try:
            # Make sure it is a valid utf-8 string
            unicode_or_utf8_string.decode('utf-8')
        except UnicodeDecodeError:
            raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
        return unicode_or_utf8_string
    return unicode_or_utf8_string.encode('utf-8')


def safe_revision_id(unicode_or_utf8_string):
    """Revision ids should now be utf8, but at one point they were unicode.

    :param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
        utf8 or None).
    :return: None or a utf8 revision id.
    """
    if (unicode_or_utf8_string is None
            or unicode_or_utf8_string.__class__ == bytes):
        return unicode_or_utf8_string
    raise TypeError('Unicode revision ids are no longer supported. '
                    'Revision id generators should be creating utf8 revision '
                    'ids.')


def safe_file_id(unicode_or_utf8_string):
    """File ids should now be utf8, but at one point they were unicode.

    This is the same as safe_utf8, except it uses the cached encode functions
    to save a little bit of performance.

    :param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
        utf8 or None).
    :return: None or a utf8 file id.
    """
    if (unicode_or_utf8_string is None
            or unicode_or_utf8_string.__class__ == bytes):
        return unicode_or_utf8_string
    raise TypeError('Unicode file ids are no longer supported. '
                    'File id generators should be creating utf8 file ids.')


_platform_normalizes_filenames = False
if sys.platform == 'darwin':
    _platform_normalizes_filenames = True


def normalizes_filenames():
    """Return True if this platform normalizes unicode filenames.

    Only Mac OSX.
    """
    return _platform_normalizes_filenames


def _accessible_normalized_filename(path):
    """Get the unicode normalized path, and if you can access the file.

    On platforms where the system normalizes filenames (Mac OSX),
    you can access a file by any path which will normalize correctly.
    On platforms where the system does not normalize filenames
    (everything else), you have to access a file by its exact path.

    Internally, bzr only supports NFC normalization, since that is
    the standard for XML documents.

    So return the normalized path, and a flag indicating if the file
    can be accessed by that path.
    """
    if isinstance(path, bytes):
        path = path.decode(sys.getfilesystemencoding())
    return unicodedata.normalize('NFC', path), True


def _inaccessible_normalized_filename(path):
    __doc__ = _accessible_normalized_filename.__doc__

    if isinstance(path, bytes):
        path = path.decode(sys.getfilesystemencoding())
    normalized = unicodedata.normalize('NFC', path)
    return normalized, normalized == path


if _platform_normalizes_filenames:
    normalized_filename = _accessible_normalized_filename
else:
    normalized_filename = _inaccessible_normalized_filename


def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`).  May be ignored if the feature is not available on this
        platform or Python version.
    """
    try:
        import signal
        siginterrupt = signal.siginterrupt
    except ImportError:
        # This python implementation doesn't provide signal support, hence no
        # handler exists
        return None
    except AttributeError:
        # siginterrupt doesn't exist on this platform, or for this version
        # of Python.
        siginterrupt = lambda signum, flag: None
    if restart_syscall:
        def sig_handler(*args):
            # Python resets the siginterrupt flag when a signal is
            # received.  <http://bugs.python.org/issue8354>
            # As a workaround for some cases, set it back the way we want it.
            siginterrupt(signum, False)
            # Now run the handler function passed to set_signal_handler.
            handler(*args)
    else:
        sig_handler = handler
    old_handler = signal.signal(signum, sig_handler)
    if restart_syscall:
        siginterrupt(signum, False)
    return old_handler


default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""

# Keep some state so that terminal_width can detect if _terminal_size has
# returned a different size since the process started.  See docstring and
# comments of terminal_width for details.
# _terminal_size_state has 3 possible values: no_data, unchanged, and changed.
_terminal_size_state = 'no_data'
_first_terminal_size = None


def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    - if BRZ_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - query the OS, if the queried size has changed since the last query,
      return its value,
    - if COLUMNS is set, returns its value,
    - if the OS has a value (even though it's never changed), return its value.

    From there, we need to query the OS to get the size of the controlling
    terminal.

    On Unices we query the OS by:
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    On Windows we query the OS by:
    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)
    """
    # Note to implementors: if changing the rules for determining the width,
    # make sure you've considered the behaviour in these cases:
    #  - M-x shell in emacs, where $COLUMNS is set and TIOCGWINSZ returns 0,0.
    #  - brz log | less, in bash, where $COLUMNS not set and TIOCGWINSZ returns
    #    0,0.
    #  - (add more interesting cases here, if you find any)
    # Some programs implement "Use $COLUMNS (if set) until SIGWINCH occurs",
    # but we don't want to register a signal handler because it is impossible
    # to do so without risking EINTR errors in Python <= 2.6.5 (see
    # <http://bugs.python.org/issue8354>).  Instead we check TIOCGWINSZ every
    # time so we can notice if the reported size has changed, which should
    # have a similar effect.

    # If BRZ_COLUMNS is set, take it, user is always right
    # Except if they specified 0 in which case, impose no limit here
    try:
        width = int(os.environ['BRZ_COLUMNS'])
    except (KeyError, ValueError):
        width = None
    if width is not None:
        if width > 0:
            return width
        return None

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BRZ_COLUMNS is the recommended way to override.
        return None

    width, height = os_size = _terminal_size(None, None)
    global _first_terminal_size, _terminal_size_state
    if _terminal_size_state == 'no_data':
        _first_terminal_size = os_size
        _terminal_size_state = 'unchanged'
    elif (_terminal_size_state == 'unchanged' and
          _first_terminal_size != os_size):
        _terminal_size_state = 'changed'

    # If the OS claims to know how wide the terminal is, and this value has
    # ever changed, use that.
    if _terminal_size_state == 'changed':
        if width is not None and width > 0:
            return width

    # If COLUMNS is set, use it.
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):
        pass

    # Finally, use an unchanged size from the OS, if we have one.
    if _terminal_size_state == 'unchanged':
        if width is not None and width > 0:
            return width

    # The width could not be determined.
    return None
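
# Illustrative sketch (not part of the original module): overriding width
# detection from the environment, per the rules above:
#
#   BRZ_COLUMNS=120 brz log    # force a 120-column layout
#   BRZ_COLUMNS=0 brz log      # impose no width limit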


def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(
        defaultx=width, defaulty=height)
    return width, height


def _ioctl_terminal_size(width, height):
    try:
        import struct, fcntl, termios
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
        pass
    return width, height


_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the
controlling terminal. If any error occurs, the provided default values should
be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
else:
    _terminal_size = _ioctl_terminal_size


def supports_executable():
    return sys.platform != "win32"


def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted.  The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"


def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable.

    :param env_variable: The environment variable in question
    :param value: The value to set the environment to. If None, then
        the variable will be removed.
    :return: The original value of the environment variable.
    """
    orig_val = os.environ.get(env_variable)
    if value is None:
        if orig_val is not None:
            del os.environ[env_variable]
    else:
        if not PY3 and isinstance(value, text_type):
            value = value.encode(get_user_encoding())
        os.environ[env_variable] = value
    return orig_val


_validWin32PathRE = re.compile(r'^([A-Za-z]:[/\\])?[^:<>*"?\|]*$')


def check_legal_path(path):
    """Check whether the supplied path is legal.

    This is only required on Windows, so we don't test on other platforms
    right now.
    """
    if sys.platform != "win32":
        return
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)


_WIN32_ERROR_DIRECTORY = 267  # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR
        or (sys.platform == 'win32'
            and (en == _WIN32_ERROR_DIRECTORY
                 or (en == errno.EINVAL
                     and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
                 ))):
        return True
    return False


def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:
    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
     - directory-relpath is the relative path of the directory being returned
       with respect to top. prefix is prepended to this.
     - directory-path-from-root is the path including top for this directory.
       It is suitable for use with os functions.
     - relpath is the relative path within the subtree being walked.
     - basename is the basename of the path
     - kind is the kind of the file now. If unknown then the file is not
       present within the tree - but it may be recorded as versioned. See
       versioned_kind.
     - lstat is the stat data *if* the file was statted.
     - planned, not implemented:
       path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
        rooted higher up.
    :return: an iterator over the dirs.
    """
    # TODO: there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _lstat = os.lstat
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
    while pending:
        # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
        relroot, _, _, _, top = pending.pop()
        if relroot:
            relprefix = relroot + u'/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        try:
            names = sorted(map(decode_filename, _listdir(top)))
        except OSError as e:
            if not _is_error_enotdir(e):
                raise
        else:
            for name in names:
                abspath = top_slash + name
                statvalue = _lstat(abspath)
                kind = _kind_from_mode(statvalue.st_mode)
                append((relprefix + name, name, kind, statvalue, abspath))
        yield (relroot, top), dirblock

        # push the user specified dirs from dirblock
        pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
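
# Illustrative sketch (not part of the original module): pruning a walk by
# mutating the yielded dirblock, as the walkdirs docstring describes:
def _example_walk_skipping_bzrdirs(tree_root):
    """Yield every relpath under tree_root without descending into '.bzr'."""
    for (dir_relpath, dir_path), entries in walkdirs(tree_root):
        entries[:] = [entry for entry in entries if entry[1] != '.bzr']
        for relpath, name, kind, st, abspath in entries:
            yield relpath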


class DirReader(object):
    """An interface for reading directories."""

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """Converts top and prefix to a starting dir entry

        :param top: A utf8 path
        :param prefix: An optional utf8 path to prefix output relative paths
            with.
        :return: A tuple starting with prefix, and ending with the native
            encoding of top.
        """
        raise NotImplementedError(self.top_prefix_to_starting_dir)

    def read_dir(self, prefix, top):
        """Read a specific dir.

        :param prefix: A utf8 prefix to be prepended to the path basenames.
        :param top: A natively encoded path to read.
        :return: A list of the directory's contents. Each item contains:
            (utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
        """
        raise NotImplementedError(self.read_dir)


_selected_dir_reader = None


def _walkdirs_utf8(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields the same information as walkdirs() only each entry is yielded
    in utf-8.  On platforms which have a filesystem encoding of utf8 the paths
    are returned as exact byte-strings.

    :return: yields a tuple of (dir_info, [file_info])
        dir_info is (utf8_relpath, path-from-top)
        file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
        if top is an absolute path, path-from-top is also an absolute path.
        path-from-top might be unicode or utf8, but it is the correct path to
        pass to os functions to affect the file in question. (such as os.lstat)
    """
    global _selected_dir_reader
    if _selected_dir_reader is None:
        if sys.platform == "win32":
            try:
                from ._walkdirs_win32 import Win32ReadDir
                _selected_dir_reader = Win32ReadDir()
            except ImportError:
                pass
        elif _fs_enc in ('utf-8', 'ascii'):
            try:
                from ._readdir_pyx import UTF8DirReader
                _selected_dir_reader = UTF8DirReader()
            except ImportError as e:
                failed_to_load_extension(e)

    if _selected_dir_reader is None:
        # Fallback to the python version
        _selected_dir_reader = UnicodeDirReader()

    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    # But we don't actually use 1-3 in pending, so set them to None
    pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
    read_dir = _selected_dir_reader.read_dir
    _directory = _directory_kind
    while pending:
        relroot, _, _, _, top = pending[-1].pop()
        if not pending[-1]:
            pending.pop()
        dirblock = sorted(read_dir(relroot, top))
        yield (relroot, top), dirblock
        # push the user specified dirs from dirblock
        next = [d for d in reversed(dirblock) if d[2] == _directory]
        if next:
            pending.append(next)


class UnicodeDirReader(DirReader):
    """A dir reader for non-utf8 file systems, which transcodes."""

    __slots__ = ['_utf8_encode']

    def __init__(self):
        self._utf8_encode = codecs.getencoder('utf8')

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """See DirReader.top_prefix_to_starting_dir."""
        return (safe_utf8(prefix), None, None, None, safe_unicode(top))

    def read_dir(self, prefix, top):
        """Read a single directory from a non-utf8 file system.

        top, and the abspath element in the output are unicode, all other paths
        are utf8. Local disk IO is done via unicode calls to listdir etc.

        This is currently the fallback code path when the filesystem encoding is
        not UTF-8. It may be better to implement an alternative so that we can
        safely handle paths that are not properly decodable in the current
        encoding.

        See DirReader.read_dir for details.
        """
        _utf8_encode = self._utf8_encode
        _fs_decode = lambda s: s.decode(_fs_enc)
        _fs_encode = lambda s: s.encode(_fs_enc)
        _lstat = os.lstat
        _listdir = os.listdir
        _kind_from_mode = file_kind_from_stat_mode

        if prefix:
            relprefix = prefix + b'/'
        else:
            relprefix = b''
        top_slash = top + '/'

        dirblock = []
        append = dirblock.append
        for name_native in _listdir(top.encode('utf-8')):
            try:
                name = _fs_decode(name_native)
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    relprefix + name_native, _fs_enc)
            name_utf8 = _utf8_encode(name)[0]
            abspath = top_slash + name
            statvalue = _lstat(abspath)
            kind = _kind_from_mode(statvalue.st_mode)
            append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
        return sorted(dirblock)


def copy_tree(from_path, to_path, handlers={}):
    """Copy all of the entries in from_path into to_path.

    :param from_path: The base directory to copy.
    :param to_path: The target directory. If it does not exist, it will
        be created.
    :param handlers: A dictionary of functions, which takes a source and
        destinations for files, directories, etc.
        It is keyed on the file kind, such as 'directory', 'symlink', or 'file'.
        'file', 'directory', and 'symlink' should always exist.
        If they are missing, they will be replaced with 'shutil.copy2()',
        'os.mkdir()', and 'os.readlink() + os.symlink()', respectively.
    """
    # Now, just copy the existing cached tree to the new location
    # We use a cheap trick here.
    # Absolute paths are prefixed with the first parameter
    # relative paths are prefixed with the second.
    # So we can get both the source and target returned
    # without any extra work.

    def copy_dir(source, dest):
        os.mkdir(dest)

    def copy_link(source, dest):
        """Copy the contents of a symlink"""
        link_to = os.readlink(source)
        os.symlink(link_to, dest)

    real_handlers = {'file': shutil.copy2,
                     'symlink': copy_link,
                     'directory': copy_dir,
                     }
    real_handlers.update(handlers)

    if not os.path.exists(to_path):
        real_handlers['directory'](from_path, to_path)

    for dir_info, entries in walkdirs(from_path, prefix=to_path):
        for relpath, name, kind, st, abspath in entries:
            real_handlers[kind](abspath, relpath)
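
# Illustrative sketch (not part of the original module): the handlers mapping
# lets a caller override one kind while keeping the defaults, e.g. copying a
# tree but replacing symlinks with empty placeholder files:
def _example_copy_tree_without_symlinks(src, dst):
    def _placeholder_for_link(source, dest):
        open(dest, 'wb').close()
    copy_tree(src, dst, handlers={'symlink': _placeholder_for_link})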
1971
def copy_ownership_from_path(dst, src=None):
1972
"""Copy usr/grp ownership from src file/dir to dst file/dir.
1974
If src is None, the containing directory is used as source. If chown
1975
fails, the error is ignored and a warning is printed.
1977
chown = getattr(os, 'chown', None)
1982
src = os.path.dirname(dst)
1988
chown(dst, s.st_uid, s.st_gid)
1989
except OSError as e:
1991
'Unable to copy ownership from "%s" to "%s". '
1992
'You may want to set it manually.', src, dst)
1993
trace.log_exception_quietly()
1996
def path_prefix_key(path):
1997
"""Generate a prefix-order path key for path.
1999
This can be used to sort paths in the same way that walkdirs does.
2001
return (dirname(path), path)
2004
def compare_paths_prefix_order(path_a, path_b):
2005
"""Compare path_a and path_b to generate the same order walkdirs uses."""
2006
key_a = path_prefix_key(path_a)
2007
key_b = path_prefix_key(path_b)
2008
return (key_a > key_b) - (key_a < key_b)
2011
_cached_user_encoding = None
2014
def get_user_encoding():
2015
"""Find out what the preferred user encoding is.
2017
This is generally the encoding that is used for command line parameters
2018
and file contents. This may be different from the terminal encoding
2019
or the filesystem encoding.
2021
:return: A string defining the preferred user encoding
2023
global _cached_user_encoding
2024
if _cached_user_encoding is not None:
2025
return _cached_user_encoding
2027
if os.name == 'posix' and getattr(locale, 'CODESET', None) is not None:
2028
# Use the existing locale settings and call nl_langinfo directly
2029
# rather than going through getpreferredencoding. This avoids
2030
# <http://bugs.python.org/issue6202> on OSX Python 2.6 and the
2031
# possibility of the setlocale call throwing an error.
2032
user_encoding = locale.nl_langinfo(locale.CODESET)
2034
# GZ 2011-12-19: On windows could call GetACP directly instead.
2035
user_encoding = locale.getpreferredencoding(False)
2038
user_encoding = codecs.lookup(user_encoding).name
2040
if user_encoding not in ("", "cp0"):
2041
sys.stderr.write('brz: warning:'
2042
' unknown encoding %s.'
2043
' Continuing with ascii encoding.\n'
2046
user_encoding = 'ascii'
2048
# Get 'ascii' when setlocale has not been called or LANG=C or unset.
2049
if user_encoding == 'ascii':
2050
if sys.platform == 'darwin':
2051
# OSX is special-cased in Python to have a UTF-8 filesystem
2052
# encoding and previously had LANG set here if not present.
2053
user_encoding = 'utf-8'
2054
# GZ 2011-12-19: Maybe UTF-8 should be the default in this case
2055
# for some other posix platforms as well.
2057
_cached_user_encoding = user_encoding
2058
return user_encoding
2061
def get_diff_header_encoding():
2062
return get_terminal_encoding()
2065
def get_host_name():
2066
"""Return the current unicode host name.
2068
This is meant to be used in place of socket.gethostname() because that
2069
behaves inconsistently on different platforms.
2071
if sys.platform == "win32":
2072
return win32utils.get_host_name()
2076
return socket.gethostname()
2077
return socket.gethostname().decode(get_user_encoding())


# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
# data at once.
MAX_SOCKET_CHUNK = 64 * 1024

_end_of_stream_errors = [errno.ECONNRESET, errno.EPIPE, errno.EINVAL]
for _eno in ['WSAECONNRESET', 'WSAECONNABORTED']:
    _eno = getattr(errno, _eno, None)
    if _eno is not None:
        _end_of_stream_errors.append(_eno)
del _eno


def read_bytes_from_socket(sock, report_activity=None,
                           max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size of bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while True:
        try:
            data = sock.recv(max_read_size)
        except socket.error as e:
            eno = e.args[0]
            if eno in _end_of_stream_errors:
                # The connection was closed by the other side. Callers expect
                # an empty string to signal end-of-stream.
                return b""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(data), 'read')
            return data


def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer. MSG_WAITALL is not available
    on all platforms, but this should work everywhere. This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = b''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == b'':
            break  # eof
        b += new
    return b
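

# Illustrative sketch, not part of the module API: reading a fixed-size
# header with recv_all and detecting an early close by the peer.  ``sock`` is
# assumed to be an already-connected socket object supplied by the caller.
def _example_read_fixed_header(sock, header_size=4):
    header = recv_all(sock, header_size)
    if len(header) < header_size:
        # recv_all returns short data only when the remote end has closed.
        raise errors.ConnectionReset('peer closed before sending full header')
    return header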


def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    """
    sent_total = 0
    byte_count = len(bytes)
    view = memoryview(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(view[sent_total:sent_total+MAX_SOCKET_CHUNK])
        except (socket.error, IOError) as e:
            if e.args[0] in _end_of_stream_errors:
                raise errors.ConnectionReset(
                    "Error trying to write to socket", e)
            if e.args[0] != errno.EINTR:
                raise
        else:
            if sent == 0:
                raise errors.ConnectionReset('Sending to %s returned 0 bytes'
                                             % (sock,))
            sent_total += sent
            if report_activity is not None:
                report_activity(sent, 'write')
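

# Illustrative sketch, never called here: sending a payload while reporting
# progress through a simple callable, mirroring the Transport._report_activity
# convention mentioned in the docstring above.  ``sock`` and ``payload`` are
# hypothetical values supplied by the caller.
def _example_send_with_activity(sock, payload):
    def report(byte_count, direction):
        trace.mutter('%s %d bytes' % (direction, byte_count))
    send_all(sock, payload, report_activity=report)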


def connect_socket(address):
    # Slight variation of the socket.create_connection() function (provided by
    # python-2.6) that can fail if getaddrinfo returns an empty list. We also
    # provide it for previous python versions. Also, we don't use the timeout
    # parameter (provided by the python implementation) so we don't implement
    # it either).
    err = socket.error('getaddrinfo returns an empty list')
    host, port = address
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.connect(sa)
            return sock

        except socket.error as e:
            err = e
            # 'err' is now the most recent error
            if sock is not None:
                sock.close()

    raise err
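

# Illustrative sketch, not used by this module: connect_socket raises the
# most recent socket.error when every address returned by getaddrinfo fails,
# so callers typically wrap it as below.  The host and port defaults are only
# examples, not values this module defines.
def _example_connect_or_none(host='localhost', port=4155):
    try:
        return connect_socket((host, port))
    except socket.error:
        return None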


def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced. But the file itself is not
    dereferenced.
    :param path: The original path. May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)


def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"
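

# Illustrative sketch, not used by this module: dereference_path (above)
# resolves symlinks in every parent directory but leaves the final element
# alone, so operations aimed at a symlink itself still see the symlink.
def _example_canonical_symlink_location(link_path):
    # realpath() would also follow ``link_path`` if it is a symlink;
    # dereference_path() keeps the last component as given.
    return dereference_path(link_path)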


def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with breezy are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can be removed.
    """
    # Check package name is within breezy
    if package == "breezy":
        resource_relpath = resource_name
    elif package.startswith("breezy."):
        package = package[len("breezy."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in breezy' % package)

    # Map the resource to a file and read its contents
    base = dirname(breezy.__file__)
    if getattr(sys, 'frozen', None):  # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    with open(pathjoin(base, resource_relpath), "rt") as f:
        return f.read()


def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from ._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError as e:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from ._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)
file_kind_from_stat_mode = file_kind_from_stat_mode_thunk


def file_stat(f, _lstat=os.lstat):
    try:
        return _lstat(f)
    except OSError as e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise


def file_kind(f, _lstat=os.lstat):
    stat_value = file_stat(f, _lstat)
    return file_kind_from_stat_mode(stat_value.st_mode)


def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur. This is typically only true for low-level operations
    like os.read. If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR. There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is signal handler for
    that signal). So this function can reduce the impact for IO that breezy
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError) as e:
            if e.errno == errno.EINTR:
                continue
            raise
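

# Illustrative sketch, not called by this module: wrapping a low-level
# os.read in until_no_eintr.  This is safe because re-issuing os.read after
# EINTR cannot duplicate data; higher-level calls may not have that property.
def _example_read_retrying_eintr(fd, byte_count):
    return until_no_eintr(os.read, fd, byte_count)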


if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty
        import termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch


if sys.platform.startswith('linux'):
    def _local_concurrency():
        try:
            return os.sysconf('SC_NPROCESSORS_ONLN')
        except (ValueError, OSError, AttributeError):
            return None
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif "bsd" in sys.platform:
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p',],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        # Who knows ?
        return None


_cached_local_concurrency = None


def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BRZ_CONCURRENCY', None)
    if concurrency is None:
        import multiprocessing
        try:
            concurrency = multiprocessing.cpu_count()
        except NotImplementedError:
            # multiprocessing.cpu_count() isn't implemented on all platforms
            try:
                concurrency = _local_concurrency()
            except (OSError, IOError):
                pass
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
        concurrency = 1
    if use_cache:
        _cached_local_concurrency = concurrency
    return concurrency
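

# Illustrative sketch, not used by the module itself: choosing a worker count
# from local_concurrency while still respecting the BRZ_CONCURRENCY override
# it already honours.  The ``max_workers`` cap is a hypothetical parameter,
# not a breezy setting.
def _example_worker_count(max_workers=8):
    # BRZ_CONCURRENCY in the environment takes precedence inside
    # local_concurrency(); this only bounds the result.
    return min(local_concurrency(), max_workers)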


class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if isinstance(object, str):
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)


if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes. Deleting or renaming a closed file opened with this
        function is not blocking child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for flags for each modes.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else:  # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)
else:
    open_file = open


def available_backup_name(base, exists):
    """Find a non-existing backup file name.

    This will *not* create anything, this only return a 'free' entry. This
    should be used for checking names in a directory below a locked
    tree/branch/repo to avoid race conditions. This is LBYL (Look Before You
    Leap) and generally discouraged.

    :param base: The base name.

    :param exists: A callable returning True if the path parameter exists.
    """
    counter = 1
    name = "%s.~%d~" % (base, counter)
    while exists(name):
        counter += 1
        name = "%s.~%d~" % (base, counter)
    return name


def set_fd_cloexec(fd):
    """Set a Unix file descriptor's FD_CLOEXEC flag. Do nothing if platform
    support for this is not available.
    """
    try:
        import fcntl
        old = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old | fcntl.FD_CLOEXEC)
    except (ImportError, AttributeError):
        # Either the fcntl module or specific constants are not present
        pass


def find_executable_on_path(name):
    """Finds an executable on the PATH.

    On Windows, this will try to append each extension in the PATHEXT
    environment variable to the name, if it cannot be found with the name
    as given.

    :param name: The base name of the executable.
    :return: The path to the executable found or None.
    """
    if sys.platform == 'win32':
        exts = os.environ.get('PATHEXT', '').split(os.pathsep)
        exts = [ext.lower() for ext in exts]
        base, ext = os.path.splitext(name)
        if ext != '':
            if ext.lower() not in exts:
                return None
            name = base
            exts = [ext]
    else:
        exts = ['']
    path = os.environ.get('PATH')
    if path is not None:
        path = path.split(os.pathsep)
        for ext in exts:
            for d in path:
                f = os.path.join(d, name) + ext
                if os.access(f, os.X_OK):
                    return f
    if sys.platform == 'win32':
        app_path = win32utils.get_app_path(name)
        if app_path != name:
            return app_path
    return None
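

# Illustrative sketch, never called here: resolving an external tool before
# spawning it, falling back gracefully when it is absent.  'gpg' is just an
# example program name, not something this module requires.
def _example_locate_tool(name='gpg'):
    path = find_executable_on_path(name)
    if path is None:
        trace.mutter('%s not found on PATH' % name)
    return path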


def _posix_is_local_pid_dead(pid):
    """True if pid doesn't correspond to live process on this machine"""
    try:
        # Special meaning of unix kill: just check if it's there.
        os.kill(pid, 0)
    except OSError as e:
        if e.errno == errno.ESRCH:
            # On this machine, and really not found: as sure as we can be
            # that it's dead.
            return True
        elif e.errno == errno.EPERM:
            # exists, though not ours
            return False
        else:
            mutter("os.kill(%d, 0) failed: %s" % (pid, e))
            # Don't really know.
            return False
    else:
        # Exists and our process: not dead.
        return False


if sys.platform == "win32":
    is_local_pid_dead = win32utils.is_local_pid_dead
else:
    is_local_pid_dead = _posix_is_local_pid_dead


_maybe_ignored = ['EAGAIN', 'EINTR', 'ENOTSUP', 'EOPNOTSUPP', 'EACCES']
_fdatasync_ignored = [getattr(errno, name) for name in _maybe_ignored
                      if getattr(errno, name, None) is not None]


def fdatasync(fileno):
    """Flush file contents to disk if possible.

    :param fileno: Integer OS file handle.
    :raises TransportNotPossible: If flushing to disk is not possible.
    """
    fn = getattr(os, 'fdatasync', getattr(os, 'fsync', None))
    if fn is not None:
        try:
            fn(fileno)
        except IOError as e:
            # See bug #1075108, on some platforms fdatasync exists, but can
            # raise ENOTSUP. However, we are calling fdatasync to be helpful
            # and reduce the chance of corruption-on-powerloss situations. It
            # is not a mandatory call, so it is ok to suppress failures.
            trace.mutter("ignoring error calling fdatasync: %s" % (e,))
            if getattr(e, 'errno', None) not in _fdatasync_ignored:
                raise
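

# Illustrative sketch, not used by this module: flushing a file to disk
# before renaming it into place, a typical corruption-on-powerloss guard.
# ``data`` is assumed to be a byte string; the path handling is deliberately
# simplistic and renaming over an existing file is not portable to Windows.
def _example_write_durably(path, data):
    fd = os.open(path + '.tmp', os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    try:
        os.write(fd, data)
        fdatasync(fd)
    finally:
        os.close(fd)
    os.rename(path + '.tmp', path)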


def ensure_empty_directory_exists(path, exception_class):
    """Make sure a local directory exists and is empty.

    If it does not exist, it is created. If it exists and is not empty, an
    instance of exception_class is raised.
    """
    try:
        os.mkdir(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        if os.listdir(path) != []:
            raise exception_class(path)
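

# Illustrative sketch, not part of the API: using an arbitrary exception
# class to signal that a target directory is already populated.  BzrError is
# used here only as a convenient example class.
def _example_prepare_target_dir(path):
    ensure_empty_directory_exists(path, errors.BzrError)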


def is_environment_error(evalue):
    """True if exception instance is due to a process environment issue

    This includes OSError and IOError, but also other errors that come from
    the operating system or core libraries but are not subclasses of those.
    """
    if isinstance(evalue, (EnvironmentError, select.error)):
        return True
    if sys.platform == "win32" and win32utils._is_pywintypes_error(evalue):