1
# Copyright (C) 2005-2011 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
from __future__ import absolute_import
27
from .lazy_import import lazy_import
28
lazy_import(globals(), """
29
from datetime import datetime
35
# We need to import both shutil and rmtree as we export the later on posix
36
# and need the former on windows
38
from shutil import rmtree
41
# We need to import both tempfile and mkdtemp as we export the later on posix
42
# and need the former on windows
44
from tempfile import mkdtemp
52
from breezy.i18n import gettext
73
# Cross platform wall-clock time functionality with decent resolution.
74
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
75
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
76
# synchronized with ``time.time()``, this is only meant to be used to find
77
# delta times by subtracting from another call to this function.
78
timer_func = time.time
79
if sys.platform == 'win32':
80
timer_func = time.clock
82
# On win32, O_BINARY is used to indicate the file should
83
# be opened in binary mode, rather than text mode.
84
# On other platforms, O_BINARY doesn't exist, because
85
# they always open in binary mode, so it is okay to
86
# OR with 0 on those platforms.
87
# O_NOINHERIT and O_TEXT exists only on win32 too.
88
O_BINARY = getattr(os, 'O_BINARY', 0)
89
O_TEXT = getattr(os, 'O_TEXT', 0)
90
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
93
class UnsupportedTimezoneFormat(errors.BzrError):
95
_fmt = ('Unsupported timezone format "%(timezone)s", '
96
'options are "utc", "original", "local".')
98
def __init__(self, timezone):
99
self.timezone = timezone
102
def get_unicode_argv():
106
user_encoding = get_user_encoding()
107
return [a.decode(user_encoding) for a in sys.argv[1:]]
108
except UnicodeDecodeError:
109
raise errors.BzrError(gettext("Parameter {0!r} encoding is unsupported by {1} "
110
"application locale.").format(a, user_encoding))
113
def make_readonly(filename):
114
"""Make a filename read-only."""
115
mod = os.lstat(filename).st_mode
116
if not stat.S_ISLNK(mod):
118
chmod_if_possible(filename, mod)
121
def make_writable(filename):
122
mod = os.lstat(filename).st_mode
123
if not stat.S_ISLNK(mod):
125
chmod_if_possible(filename, mod)
128
def chmod_if_possible(filename, mode):
129
# Set file mode if that can be safely done.
130
# Sometimes even on unix the filesystem won't allow it - see
131
# https://bugs.launchpad.net/bzr/+bug/606537
133
# It is probably faster to just do the chmod, rather than
134
# doing a stat, and then trying to compare
135
os.chmod(filename, mode)
136
except (IOError, OSError) as e:
137
# Permission/access denied seems to commonly happen on smbfs; there's
138
# probably no point warning about it.
139
# <https://bugs.launchpad.net/bzr/+bug/606537>
140
if getattr(e, 'errno') in (errno.EPERM, errno.EACCES):
141
trace.mutter("ignore error on chmod of %r: %r" % (
147
def minimum_path_selection(paths):
148
"""Return the smallset subset of paths which are outside paths.
150
:param paths: A container (and hence not None) of paths.
151
:return: A set of paths sufficient to include everything in paths via
152
is_inside, drawn from the paths parameter.
158
if isinstance(path, bytes):
159
return path.split(b'/')
161
return path.split('/')
162
sorted_paths = sorted(list(paths), key=sort_key)
164
search_paths = [sorted_paths[0]]
165
for path in sorted_paths[1:]:
166
if not is_inside(search_paths[-1], path):
167
# This path is unique, add it
168
search_paths.append(path)
170
return set(search_paths)
177
"""Return a quoted filename filename
179
This previously used backslash quoting, but that works poorly on
181
# TODO: I'm not really sure this is the best format either.x
183
if _QUOTE_RE is None:
184
_QUOTE_RE = re.compile(r'([^a-zA-Z0-9.,:/\\_~-])')
186
if _QUOTE_RE.search(f):
192
_directory_kind = 'directory'
196
"""Return the current umask"""
197
# Assume that people aren't messing with the umask while running
198
# XXX: This is not thread safe, but there is no way to get the
199
# umask without setting it
207
_directory_kind: "/",
209
'tree-reference': '+',
213
def kind_marker(kind):
215
return _kind_marker_map[kind]
217
# Slightly faster than using .get(, '') when the common case is that
222
lexists = getattr(os.path, 'lexists', None)
226
stat = getattr(os, 'lstat', os.stat)
230
if e.errno == errno.ENOENT:
233
raise errors.BzrError(
234
gettext("lstat/stat of ({0!r}): {1!r}").format(f, e))
237
def fancy_rename(old, new, rename_func, unlink_func):
238
"""A fancy rename, when you don't have atomic rename.
240
:param old: The old path, to rename from
241
:param new: The new path, to rename to
242
:param rename_func: The potentially non-atomic rename function
243
:param unlink_func: A way to delete the target file if the full rename
246
# sftp rename doesn't allow overwriting, so play tricks:
247
base = os.path.basename(new)
248
dirname = os.path.dirname(new)
249
# callers use different encodings for the paths so the following MUST
250
# respect that. We rely on python upcasting to unicode if new is unicode
251
# and keeping a str if not.
252
tmp_name = 'tmp.%s.%.9f.%d.%s' % (base, time.time(),
253
os.getpid(), rand_chars(10))
254
tmp_name = pathjoin(dirname, tmp_name)
256
# Rename the file out of the way, but keep track if it didn't exist
257
# We don't want to grab just any exception
258
# something like EACCES should prevent us from continuing
259
# The downside is that the rename_func has to throw an exception
260
# with an errno = ENOENT, or NoSuchFile
263
rename_func(new, tmp_name)
264
except (errors.NoSuchFile,):
267
# RBC 20060103 abstraction leakage: the paramiko SFTP clients rename
268
# function raises an IOError with errno is None when a rename fails.
269
# This then gets caught here.
270
if e.errno not in (None, errno.ENOENT, errno.ENOTDIR):
272
except Exception as e:
273
if (getattr(e, 'errno', None) is None
274
or e.errno not in (errno.ENOENT, errno.ENOTDIR)):
281
# This may throw an exception, in which case success will
283
rename_func(old, new)
285
except (IOError, OSError) as e:
286
# source and target may be aliases of each other (e.g. on a
287
# case-insensitive filesystem), so we may have accidentally renamed
288
# source by when we tried to rename target
289
if (file_existed and e.errno in (None, errno.ENOENT)
290
and old.lower() == new.lower()):
291
# source and target are the same file on a case-insensitive
292
# filesystem, so we don't generate an exception
298
# If the file used to exist, rename it back into place
299
# otherwise just delete it from the tmp location
301
unlink_func(tmp_name)
303
rename_func(tmp_name, new)
306
# In Python 2.4.2 and older, os.path.abspath and os.path.realpath
307
# choke on a Unicode string containing a relative path if
308
# os.getcwd() returns a non-sys.getdefaultencoding()-encoded
310
def _posix_abspath(path):
311
# jam 20060426 rather than encoding to fsencoding
312
# copy posixpath.abspath, but use os.getcwdu instead
313
if not posixpath.isabs(path):
314
path = posixpath.join(getcwd(), path)
315
return _posix_normpath(path)
318
def _posix_realpath(path):
319
return posixpath.realpath(path.encode(_fs_enc)).decode(_fs_enc)
322
def _posix_normpath(path):
323
path = posixpath.normpath(path)
324
# Bug 861008: posixpath.normpath() returns a path normalized according to
325
# the POSIX standard, which stipulates (for compatibility reasons) that two
326
# leading slashes must not be simplified to one, and only if there are 3 or
327
# more should they be simplified as one. So we treat the leading 2 slashes
328
# as a special case here by simply removing the first slash, as we consider
329
# that breaking POSIX compatibility for this obscure feature is acceptable.
330
# This is not a paranoid precaution, as we notably get paths like this when
331
# the repo is hosted at the root of the filesystem, i.e. in "/".
332
if path.startswith('//'):
337
def _posix_path_from_environ(key):
338
"""Get unicode path from `key` in environment or None if not present
340
Note that posix systems use arbitrary byte strings for filesystem objects,
341
so a path that raises BadFilenameEncoding here may still be accessible.
343
val = os.environ.get(key, None)
344
if PY3 or val is None:
347
return val.decode(_fs_enc)
348
except UnicodeDecodeError:
349
# GZ 2011-12-12:Ideally want to include `key` in the exception message
350
raise errors.BadFilenameEncoding(val, _fs_enc)
353
def _posix_get_home_dir():
354
"""Get the home directory of the current user as a unicode path"""
355
path = posixpath.expanduser("~")
357
return path.decode(_fs_enc)
358
except AttributeError:
360
except UnicodeDecodeError:
361
raise errors.BadFilenameEncoding(path, _fs_enc)
364
def _posix_getuser_unicode():
365
"""Get username from environment or password database as unicode"""
366
name = getpass.getuser()
369
user_encoding = get_user_encoding()
371
return name.decode(user_encoding)
372
except UnicodeDecodeError:
373
raise errors.BzrError("Encoding of username %r is unsupported by %s "
374
"application locale." % (name, user_encoding))
377
def _win32_fixdrive(path):
378
"""Force drive letters to be consistent.
380
win32 is inconsistent whether it returns lower or upper case
381
and even if it was consistent the user might type the other
382
so we force it to uppercase
383
running python.exe under cmd.exe return capital C:\\
384
running win32 python inside a cygwin shell returns lowercase c:\\
386
drive, path = ntpath.splitdrive(path)
387
return drive.upper() + path
390
def _win32_abspath(path):
391
# Real ntpath.abspath doesn't have a problem with a unicode cwd
392
return _win32_fixdrive(ntpath.abspath(path).replace('\\', '/'))
395
def _win32_realpath(path):
396
# Real ntpath.realpath doesn't have a problem with a unicode cwd
397
return _win32_fixdrive(ntpath.realpath(path).replace('\\', '/'))
400
def _win32_pathjoin(*args):
401
return ntpath.join(*args).replace('\\', '/')
404
def _win32_normpath(path):
405
return _win32_fixdrive(ntpath.normpath(path).replace('\\', '/'))
409
return _win32_fixdrive(_getcwd().replace('\\', '/'))
412
def _win32_mkdtemp(*args, **kwargs):
413
return _win32_fixdrive(tempfile.mkdtemp(*args, **kwargs).replace('\\', '/'))
416
def _win32_rename(old, new):
417
"""We expect to be able to atomically replace 'new' with old.
419
On win32, if new exists, it must be moved out of the way first,
423
fancy_rename(old, new, rename_func=os.rename, unlink_func=os.unlink)
425
if e.errno in (errno.EPERM, errno.EACCES, errno.EBUSY, errno.EINVAL):
426
# If we try to rename a non-existant file onto cwd, we get
427
# EPERM or EACCES instead of ENOENT, this will raise ENOENT
428
# if the old path doesn't exist, sometimes we get EACCES
429
# On Linux, we seem to get EBUSY, on Mac we get EINVAL
435
return unicodedata.normalize('NFC', _getcwd())
438
def _rename_wrap_exception(rename_func):
439
"""Adds extra information to any exceptions that come from rename().
441
The exception has an updated message and 'old_filename' and 'new_filename'
445
def _rename_wrapper(old, new):
447
rename_func(old, new)
449
detailed_error = OSError(e.errno, e.strerror +
450
" [occurred when renaming '%s' to '%s']" %
452
detailed_error.old_filename = old
453
detailed_error.new_filename = new
456
return _rename_wrapper
459
if sys.version_info > (3,):
465
# Default rename wraps os.rename()
466
rename = _rename_wrap_exception(os.rename)
468
# Default is to just use the python builtins, but these can be rebound on
469
# particular platforms.
470
abspath = _posix_abspath
471
realpath = _posix_realpath
472
pathjoin = os.path.join
473
normpath = _posix_normpath
474
path_from_environ = _posix_path_from_environ
475
_get_home_dir = _posix_get_home_dir
476
getuser_unicode = _posix_getuser_unicode
478
dirname = os.path.dirname
479
basename = os.path.basename
480
split = os.path.split
481
splitext = os.path.splitext
482
# These were already lazily imported into local scope
483
# mkdtemp = tempfile.mkdtemp
484
# rmtree = shutil.rmtree
493
MIN_ABS_PATHLENGTH = 1
496
if sys.platform == 'win32':
497
abspath = _win32_abspath
498
realpath = _win32_realpath
499
pathjoin = _win32_pathjoin
500
normpath = _win32_normpath
501
getcwd = _win32_getcwd
502
mkdtemp = _win32_mkdtemp
503
rename = _rename_wrap_exception(_win32_rename)
505
from . import _walkdirs_win32
509
lstat = _walkdirs_win32.lstat
510
fstat = _walkdirs_win32.fstat
511
wrap_stat = _walkdirs_win32.wrap_stat
513
MIN_ABS_PATHLENGTH = 3
515
def _win32_delete_readonly(function, path, excinfo):
516
"""Error handler for shutil.rmtree function [for win32]
517
Helps to remove files and dirs marked as read-only.
519
exception = excinfo[1]
520
if function in (os.remove, os.rmdir) \
521
and isinstance(exception, OSError) \
522
and exception.errno == errno.EACCES:
528
def rmtree(path, ignore_errors=False, onerror=_win32_delete_readonly):
529
"""Replacer for shutil.rmtree: could remove readonly dirs/files"""
530
return shutil.rmtree(path, ignore_errors, onerror)
532
f = win32utils.get_unicode_argv # special function or None
535
path_from_environ = win32utils.get_environ_unicode
536
_get_home_dir = win32utils.get_home_location
537
getuser_unicode = win32utils.get_user_name
539
elif sys.platform == 'darwin':
543
def get_terminal_encoding(trace=False):
544
"""Find the best encoding for printing to the screen.
546
This attempts to check both sys.stdout and sys.stdin to see
547
what encoding they are in, and if that fails it falls back to
548
osutils.get_user_encoding().
549
The problem is that on Windows, locale.getpreferredencoding()
550
is not the same encoding as that used by the console:
551
http://mail.python.org/pipermail/python-list/2003-May/162357.html
553
On my standard US Windows XP, the preferred encoding is
554
cp1252, but the console is cp437
556
:param trace: If True trace the selected encoding via mutter().
558
from .trace import mutter
559
output_encoding = getattr(sys.stdout, 'encoding', None)
560
if not output_encoding:
561
input_encoding = getattr(sys.stdin, 'encoding', None)
562
if not input_encoding:
563
output_encoding = get_user_encoding()
565
mutter('encoding stdout as osutils.get_user_encoding() %r',
568
output_encoding = input_encoding
570
mutter('encoding stdout as sys.stdin encoding %r',
574
mutter('encoding stdout as sys.stdout encoding %r', output_encoding)
575
if output_encoding == 'cp0':
576
# invalid encoding (cp0 means 'no codepage' on Windows)
577
output_encoding = get_user_encoding()
579
mutter('cp0 is invalid encoding.'
580
' encoding stdout as osutils.get_user_encoding() %r',
584
codecs.lookup(output_encoding)
586
sys.stderr.write('brz: warning:'
587
' unknown terminal encoding %s.\n'
588
' Using encoding %s instead.\n'
589
% (output_encoding, get_user_encoding())
591
output_encoding = get_user_encoding()
593
return output_encoding
596
def normalizepath(f):
597
if getattr(os.path, 'realpath', None) is not None:
601
[p, e] = os.path.split(f)
602
if e == "" or e == "." or e == "..":
605
return pathjoin(F(p), e)
609
"""True if f is an accessible directory."""
611
return stat.S_ISDIR(os.lstat(f)[stat.ST_MODE])
617
"""True if f is a regular file."""
619
return stat.S_ISREG(os.lstat(f)[stat.ST_MODE])
625
"""True if f is a symlink."""
627
return stat.S_ISLNK(os.lstat(f)[stat.ST_MODE])
632
def is_inside(dir, fname):
633
"""True if fname is inside dir.
635
The parameters should typically be passed to osutils.normpath first, so
636
that . and .. and repeated slashes are eliminated, and the separators
637
are canonical for the platform.
639
The empty string as a dir name is taken as top-of-tree and matches
642
# XXX: Most callers of this can actually do something smarter by
643
# looking at the inventory
650
if isinstance(dir, bytes):
651
if not dir.endswith(b'/'):
654
if not dir.endswith('/'):
657
return fname.startswith(dir)
660
def is_inside_any(dir_list, fname):
661
"""True if fname is inside any of given dirs."""
662
for dirname in dir_list:
663
if is_inside(dirname, fname):
668
def is_inside_or_parent_of_any(dir_list, fname):
669
"""True if fname is a child or a parent of any of the given files."""
670
for dirname in dir_list:
671
if is_inside(dirname, fname) or is_inside(fname, dirname):
676
def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
677
report_activity=None, direction='read'):
678
"""Copy contents of one file to another.
680
The read_length can either be -1 to read to end-of-file (EOF) or
681
it can specify the maximum number of bytes to read.
683
The buff_size represents the maximum size for each read operation
684
performed on from_file.
686
:param report_activity: Call this as bytes are read, see
687
Transport._report_activity
688
:param direction: Will be passed to report_activity
690
:return: The number of bytes copied.
694
# read specified number of bytes
696
while read_length > 0:
697
num_bytes_to_read = min(read_length, buff_size)
699
block = from_file.read(num_bytes_to_read)
703
if report_activity is not None:
704
report_activity(len(block), direction)
707
actual_bytes_read = len(block)
708
read_length -= actual_bytes_read
709
length += actual_bytes_read
713
block = from_file.read(buff_size)
717
if report_activity is not None:
718
report_activity(len(block), direction)
724
def pump_string_file(bytes, file_handle, segment_size=None):
725
"""Write bytes to file_handle in many smaller writes.
727
:param bytes: The string to write.
728
:param file_handle: The file to write to.
730
# Write data in chunks rather than all at once, because very large
731
# writes fail on some platforms (e.g. Windows with SMB mounted
734
segment_size = 5242880 # 5MB
735
offsets = range(0, len(bytes), segment_size)
736
view = memoryview(bytes)
737
write = file_handle.write
738
for offset in offsets:
739
write(view[offset:offset + segment_size])
742
def file_iterator(input_file, readsize=32768):
744
b = input_file.read(readsize)
750
# GZ 2017-09-16: Makes sense in general for hexdigest() result to be text, but
751
# used as bytes through most interfaces so encode with this wrapper.
753
def _hexdigest(hashobj):
754
return hashobj.hexdigest().encode()
756
def _hexdigest(hashobj):
757
return hashobj.hexdigest()
761
"""Calculate the hexdigest of an open file.
763
The file cursor should be already at the start.
775
def size_sha_file(f):
776
"""Calculate the size and hexdigest of an open file.
778
The file cursor should be already at the start and
779
the caller is responsible for closing the file afterwards.
790
return size, _hexdigest(s)
793
def sha_file_by_name(fname):
794
"""Calculate the SHA1 of a file by reading the full text"""
796
f = os.open(fname, os.O_RDONLY | O_BINARY | O_NOINHERIT)
799
b = os.read(f, 1 << 16)
807
def sha_strings(strings, _factory=sha):
808
"""Return the sha-1 of concatenation of strings"""
810
for string in strings:
815
def sha_string(f, _factory=sha):
816
# GZ 2017-09-16: Dodgy if factory is ever not sha, probably shouldn't be.
817
return _hexdigest(_factory(f))
820
def fingerprint_file(f):
822
return {'size': len(b),
823
'sha1': _hexdigest(sha(b))}
826
def compare_files(a, b):
827
"""Returns true if equal in contents"""
838
def local_time_offset(t=None):
839
"""Return offset of local zone from GMT, either at present or at time t."""
842
offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
843
return offset.days * 86400 + offset.seconds
846
weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
847
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]
850
def format_date(t, offset=0, timezone='original', date_fmt=None,
852
"""Return a formatted date string.
854
:param t: Seconds since the epoch.
855
:param offset: Timezone offset in seconds east of utc.
856
:param timezone: How to display the time: 'utc', 'original' for the
857
timezone specified by offset, or 'local' for the process's current
859
:param date_fmt: strftime format.
860
:param show_offset: Whether to append the timezone.
862
(date_fmt, tt, offset_str) = \
863
_format_date(t, offset, timezone, date_fmt, show_offset)
864
date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
865
date_str = time.strftime(date_fmt, tt)
866
return date_str + offset_str
869
# Cache of formatted offset strings
873
def format_date_with_offset_in_original_timezone(t, offset=0,
874
_cache=_offset_cache):
875
"""Return a formatted date string in the original timezone.
877
This routine may be faster then format_date.
879
:param t: Seconds since the epoch.
880
:param offset: Timezone offset in seconds east of utc.
884
tt = time.gmtime(t + offset)
885
date_fmt = _default_format_by_weekday_num[tt[6]]
886
date_str = time.strftime(date_fmt, tt)
887
offset_str = _cache.get(offset, None)
888
if offset_str is None:
889
offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
890
_cache[offset] = offset_str
891
return date_str + offset_str
894
def format_local_date(t, offset=0, timezone='original', date_fmt=None,
896
"""Return an unicode date string formatted according to the current locale.
898
:param t: Seconds since the epoch.
899
:param offset: Timezone offset in seconds east of utc.
900
:param timezone: How to display the time: 'utc', 'original' for the
901
timezone specified by offset, or 'local' for the process's current
903
:param date_fmt: strftime format.
904
:param show_offset: Whether to append the timezone.
906
(date_fmt, tt, offset_str) = \
907
_format_date(t, offset, timezone, date_fmt, show_offset)
908
date_str = time.strftime(date_fmt, tt)
909
if not isinstance(date_str, text_type):
910
date_str = date_str.decode(get_user_encoding(), 'replace')
911
return date_str + offset_str
914
def _format_date(t, offset, timezone, date_fmt, show_offset):
915
if timezone == 'utc':
918
elif timezone == 'original':
921
tt = time.gmtime(t + offset)
922
elif timezone == 'local':
923
tt = time.localtime(t)
924
offset = local_time_offset(t)
926
raise UnsupportedTimezoneFormat(timezone)
928
date_fmt = "%a %Y-%m-%d %H:%M:%S"
930
offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
933
return (date_fmt, tt, offset_str)
936
def compact_date(when):
937
return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))
940
def format_delta(delta):
941
"""Get a nice looking string for a time delta.
943
:param delta: The time difference in seconds, can be positive or negative.
944
positive indicates time in the past, negative indicates time in the
945
future. (usually time.time() - stored_time)
946
:return: String formatted to show approximate resolution
952
direction = 'in the future'
956
if seconds < 90: # print seconds up to 90 seconds
958
return '%d second %s' % (seconds, direction,)
960
return '%d seconds %s' % (seconds, direction)
962
minutes = int(seconds / 60)
963
seconds -= 60 * minutes
968
if minutes < 90: # print minutes, seconds up to 90 minutes
970
return '%d minute, %d second%s %s' % (
971
minutes, seconds, plural_seconds, direction)
973
return '%d minutes, %d second%s %s' % (
974
minutes, seconds, plural_seconds, direction)
976
hours = int(minutes / 60)
977
minutes -= 60 * hours
984
return '%d hour, %d minute%s %s' % (hours, minutes,
985
plural_minutes, direction)
986
return '%d hours, %d minute%s %s' % (hours, minutes,
987
plural_minutes, direction)
991
"""Return size of given open file."""
992
return os.fstat(f.fileno())[stat.ST_SIZE]
995
# Alias os.urandom to support platforms (which?) without /dev/urandom and
996
# override if it doesn't work. Avoid checking on windows where there is
997
# significant initialisation cost that can be avoided for some bzr calls.
999
rand_bytes = os.urandom
1001
if rand_bytes.__module__ != "nt":
1004
except NotImplementedError:
1005
# not well seeded, but better than nothing
1010
s += chr(random.randint(0, 255))
1015
ALNUM = '0123456789abcdefghijklmnopqrstuvwxyz'
1018
def rand_chars(num):
1019
"""Return a random string of num alphanumeric characters
1021
The result only contains lowercase chars because it may be used on
1022
case-insensitive filesystems.
1025
for raw_byte in rand_bytes(num):
1027
s += ALNUM[ord(raw_byte) % 36]
1029
s += ALNUM[raw_byte % 36]
1033
# TODO: We could later have path objects that remember their list
1034
# decomposition (might be too tricksy though.)
1037
"""Turn string into list of parts."""
1038
# split on either delimiter because people might use either on
1040
if isinstance(p, bytes):
1041
ps = re.split(b'[\\\\/]', p)
1043
ps = re.split(r'[\\/]', p)
1047
if f in ('..', b'..'):
1048
raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)
1049
elif f in ('.', '', b'.', b''):
1058
if (f == '..') or (f is None) or (f == ''):
1059
raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)
1063
def parent_directories(filename):
1064
"""Return the list of parent directories, deepest first.
1066
For example, parent_directories("a/b/c") -> ["a/b", "a"].
1069
parts = splitpath(dirname(filename))
1071
parents.append(joinpath(parts))
1076
_extension_load_failures = []
1079
def failed_to_load_extension(exception):
1080
"""Handle failing to load a binary extension.
1082
This should be called from the ImportError block guarding the attempt to
1083
import the native extension. If this function returns, the pure-Python
1084
implementation should be loaded instead::
1087
>>> import breezy._fictional_extension_pyx
1088
>>> except ImportError, e:
1089
>>> breezy.osutils.failed_to_load_extension(e)
1090
>>> import breezy._fictional_extension_py
1092
# NB: This docstring is just an example, not a doctest, because doctest
1093
# currently can't cope with the use of lazy imports in this namespace --
1096
# This currently doesn't report the failure at the time it occurs, because
1097
# they tend to happen very early in startup when we can't check config
1098
# files etc, and also we want to report all failures but not spam the user
1100
exception_str = str(exception)
1101
if exception_str not in _extension_load_failures:
1102
trace.mutter("failed to load compiled extension: %s" % exception_str)
1103
_extension_load_failures.append(exception_str)
1106
def report_extension_load_failures():
1107
if not _extension_load_failures:
1109
if config.GlobalConfig().suppress_warning('missing_extensions'):
1111
# the warnings framework should by default show this only once
1112
from .trace import warning
1114
"brz: warning: some compiled extensions could not be loaded; "
1115
"see ``brz help missing-extensions``")
1116
# we no longer show the specific missing extensions here, because it makes
1117
# the message too long and scary - see
1118
# https://bugs.launchpad.net/bzr/+bug/430529
1122
from ._chunks_to_lines_pyx import chunks_to_lines
1123
except ImportError as e:
1124
failed_to_load_extension(e)
1125
from ._chunks_to_lines_py import chunks_to_lines
1129
"""Split s into lines, but without removing the newline characters."""
1130
# Trivially convert a fulltext into a 'chunked' representation, and let
1131
# chunks_to_lines do the heavy lifting.
1132
if isinstance(s, bytes):
1133
# chunks_to_lines only supports 8-bit strings
1134
return chunks_to_lines([s])
1136
return _split_lines(s)
1139
def _split_lines(s):
1140
"""Split s into lines, but without removing the newline characters.
1142
This supports Unicode or plain string objects.
1144
nl = b'\n' if isinstance(s, bytes) else u'\n'
1146
result = [line + nl for line in lines[:-1]]
1148
result.append(lines[-1])
1152
def hardlinks_good():
1153
return sys.platform not in ('win32', 'cygwin', 'darwin')
1156
def link_or_copy(src, dest):
1157
"""Hardlink a file, or copy it if it can't be hardlinked."""
1158
if not hardlinks_good():
1159
shutil.copyfile(src, dest)
1163
except (OSError, IOError) as e:
1164
if e.errno != errno.EXDEV:
1166
shutil.copyfile(src, dest)
1169
def delete_any(path):
1170
"""Delete a file, symlink or directory.
1172
Will delete even if readonly.
1175
_delete_file_or_dir(path)
1176
except (OSError, IOError) as e:
1177
if e.errno in (errno.EPERM, errno.EACCES):
1178
# make writable and try again
1181
except (OSError, IOError):
1183
_delete_file_or_dir(path)
1188
def _delete_file_or_dir(path):
1189
# Look Before You Leap (LBYL) is appropriate here instead of Easier to Ask for
1190
# Forgiveness than Permission (EAFP) because:
1191
# - root can damage a solaris file system by using unlink,
1192
# - unlink raises different exceptions on different OSes (linux: EISDIR, win32:
1193
# EACCES, OSX: EPERM) when invoked on a directory.
1194
if isdir(path): # Takes care of symlinks
1201
if getattr(os, 'symlink', None) is not None:
1207
def has_hardlinks():
1208
if getattr(os, 'link', None) is not None:
1214
def host_os_dereferences_symlinks():
1215
return (has_symlinks()
1216
and sys.platform not in ('cygwin', 'win32'))
1219
def readlink(abspath):
1220
"""Return a string representing the path to which the symbolic link points.
1222
:param abspath: The link absolute unicode path.
1224
This his guaranteed to return the symbolic link in unicode in all python
1227
link = abspath.encode(_fs_enc)
1228
target = os.readlink(link)
1229
target = target.decode(_fs_enc)
1233
def contains_whitespace(s):
1234
"""True if there are any whitespace characters in s."""
1235
# string.whitespace can include '\xa0' in certain locales, because it is
1236
# considered "non-breaking-space" as part of ISO-8859-1. But it
1237
# 1) Isn't a breaking whitespace
1238
# 2) Isn't one of ' \t\r\n' which are characters we sometimes use as
1240
# 3) '\xa0' isn't unicode safe since it is >128.
1242
if isinstance(s, str):
1245
ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')
1253
def contains_linebreaks(s):
1254
"""True if there is any vertical whitespace in s."""
1262
def relpath(base, path):
1263
"""Return path relative to base, or raise PathNotChild exception.
1265
The path may be either an absolute path or a path relative to the
1266
current working directory.
1268
os.path.commonprefix (python2.4) has a bad bug that it works just
1269
on string prefixes, assuming that '/u' is a prefix of '/u2'. This
1270
avoids that problem.
1272
NOTE: `base` should not have a trailing slash otherwise you'll get
1273
PathNotChild exceptions regardless of `path`.
1276
if len(base) < MIN_ABS_PATHLENGTH:
1277
# must have space for e.g. a drive letter
1278
raise ValueError(gettext('%r is too short to calculate a relative path')
1286
if len(head) <= len(base) and head != base:
1287
raise errors.PathNotChild(rp, base)
1290
head, tail = split(head)
1295
return pathjoin(*reversed(s))
1300
def _cicp_canonical_relpath(base, path):
1301
"""Return the canonical path relative to base.
1303
Like relpath, but on case-insensitive-case-preserving file-systems, this
1304
will return the relpath as stored on the file-system rather than in the
1305
case specified in the input string, for all existing portions of the path.
1307
This will cause O(N) behaviour if called for every path in a tree; if you
1308
have a number of paths to convert, you should use canonical_relpaths().
1310
# TODO: it should be possible to optimize this for Windows by using the
1311
# win32 API FindFiles function to look for the specified name - but using
1312
# os.listdir() still gives us the correct, platform agnostic semantics in
1315
rel = relpath(base, path)
1316
# '.' will have been turned into ''
1320
abs_base = abspath(base)
1322
_listdir = os.listdir
1324
# use an explicit iterator so we can easily consume the rest on early exit.
1325
bit_iter = iter(rel.split('/'))
1326
for bit in bit_iter:
1329
next_entries = _listdir(current)
1330
except OSError: # enoent, eperm, etc
1331
# We can't find this in the filesystem, so just append the
1333
current = pathjoin(current, bit, *list(bit_iter))
1335
for look in next_entries:
1336
if lbit == look.lower():
1337
current = pathjoin(current, look)
1340
# got to the end, nothing matched, so we just return the
1341
# non-existing bits as they were specified (the filename may be
1342
# the target of a move, for example).
1343
current = pathjoin(current, bit, *list(bit_iter))
1345
return current[len(abs_base):].lstrip('/')
1348
# XXX - TODO - we need better detection/integration of case-insensitive
1349
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
1350
# filesystems), for example, so could probably benefit from the same basic
1351
# support there. For now though, only Windows and OSX get that support, and
1352
# they get it for *all* file-systems!
1353
if sys.platform in ('win32', 'darwin'):
1354
canonical_relpath = _cicp_canonical_relpath
1356
canonical_relpath = relpath
1359
def canonical_relpaths(base, paths):
1360
"""Create an iterable to canonicalize a sequence of relative paths.
1362
The intent is for this implementation to use a cache, vastly speeding
1363
up multiple transformations in the same directory.
1365
# but for now, we haven't optimized...
1366
return [canonical_relpath(base, p) for p in paths]
1369
def decode_filename(filename):
1370
"""Decode the filename using the filesystem encoding
1372
If it is unicode, it is returned.
1373
Otherwise it is decoded from the the filesystem's encoding. If decoding
1374
fails, a errors.BadFilenameEncoding exception is raised.
1376
if isinstance(filename, text_type):
1379
return filename.decode(_fs_enc)
1380
except UnicodeDecodeError:
1381
raise errors.BadFilenameEncoding(filename, _fs_enc)
1384
def safe_unicode(unicode_or_utf8_string):
1385
"""Coerce unicode_or_utf8_string into unicode.
1387
If it is unicode, it is returned.
1388
Otherwise it is decoded from utf-8. If decoding fails, the exception is
1389
wrapped in a BzrBadParameterNotUnicode exception.
1391
if isinstance(unicode_or_utf8_string, text_type):
1392
return unicode_or_utf8_string
1394
return unicode_or_utf8_string.decode('utf8')
1395
except UnicodeDecodeError:
1396
raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
1399
def safe_utf8(unicode_or_utf8_string):
1400
"""Coerce unicode_or_utf8_string to a utf8 string.
1402
If it is a str, it is returned.
1403
If it is Unicode, it is encoded into a utf-8 string.
1405
if isinstance(unicode_or_utf8_string, bytes):
1406
# TODO: jam 20070209 This is overkill, and probably has an impact on
1407
# performance if we are dealing with lots of apis that want a
1410
# Make sure it is a valid utf-8 string
1411
unicode_or_utf8_string.decode('utf-8')
1412
except UnicodeDecodeError:
1413
raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
1414
return unicode_or_utf8_string
1415
return unicode_or_utf8_string.encode('utf-8')
1418
def safe_revision_id(unicode_or_utf8_string):
1419
"""Revision ids should now be utf8, but at one point they were unicode.
1421
:param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
1423
:return: None or a utf8 revision id.
1425
if (unicode_or_utf8_string is None
1426
or unicode_or_utf8_string.__class__ == bytes):
1427
return unicode_or_utf8_string
1428
raise TypeError('Unicode revision ids are no longer supported. '
1429
'Revision id generators should be creating utf8 revision '
1433
def safe_file_id(unicode_or_utf8_string):
1434
"""File ids should now be utf8, but at one point they were unicode.
1436
This is the same as safe_utf8, except it uses the cached encode functions
1437
to save a little bit of performance.
1439
:param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
1441
:return: None or a utf8 file id.
1443
if (unicode_or_utf8_string is None
1444
or unicode_or_utf8_string.__class__ == bytes):
1445
return unicode_or_utf8_string
1446
raise TypeError('Unicode file ids are no longer supported. '
1447
'File id generators should be creating utf8 file ids.')
1450
_platform_normalizes_filenames = False
1451
if sys.platform == 'darwin':
1452
_platform_normalizes_filenames = True
1455
def normalizes_filenames():
1456
"""Return True if this platform normalizes unicode filenames.
1460
return _platform_normalizes_filenames
1463
def _accessible_normalized_filename(path):
1464
"""Get the unicode normalized path, and if you can access the file.
1466
On platforms where the system normalizes filenames (Mac OSX),
1467
you can access a file by any path which will normalize correctly.
1468
On platforms where the system does not normalize filenames
1469
(everything else), you have to access a file by its exact path.
1471
Internally, bzr only supports NFC normalization, since that is
1472
the standard for XML documents.
1474
So return the normalized path, and a flag indicating if the file
1475
can be accessed by that path.
1478
if isinstance(path, bytes):
1479
path = path.decode(sys.getfilesystemencoding())
1480
return unicodedata.normalize('NFC', path), True
1483
def _inaccessible_normalized_filename(path):
1484
__doc__ = _accessible_normalized_filename.__doc__
1486
if isinstance(path, bytes):
1487
path = path.decode(sys.getfilesystemencoding())
1488
normalized = unicodedata.normalize('NFC', path)
1489
return normalized, normalized == path
1492
if _platform_normalizes_filenames:
1493
normalized_filename = _accessible_normalized_filename
1495
normalized_filename = _inaccessible_normalized_filename
1498
def set_signal_handler(signum, handler, restart_syscall=True):
1499
"""A wrapper for signal.signal that also calls siginterrupt(signum, False)
1500
on platforms that support that.
1502
:param restart_syscall: if set, allow syscalls interrupted by a signal to
1503
automatically restart (by calling `signal.siginterrupt(signum,
1504
False)`). May be ignored if the feature is not available on this
1505
platform or Python version.
1509
siginterrupt = signal.siginterrupt
1511
# This python implementation doesn't provide signal support, hence no
1514
except AttributeError:
1515
# siginterrupt doesn't exist on this platform, or for this version
1517
def siginterrupt(signum, flag): return None
1519
def sig_handler(*args):
1520
# Python resets the siginterrupt flag when a signal is
1521
# received. <http://bugs.python.org/issue8354>
1522
# As a workaround for some cases, set it back the way we want it.
1523
siginterrupt(signum, False)
1524
# Now run the handler function passed to set_signal_handler.
1527
sig_handler = handler
1528
old_handler = signal.signal(signum, sig_handler)
1530
siginterrupt(signum, False)
1534
default_terminal_width = 80
1535
"""The default terminal width for ttys.
1537
This is defined so that higher levels can share a common fallback value when
1538
terminal_width() returns None.
1541
# Keep some state so that terminal_width can detect if _terminal_size has
1542
# returned a different size since the process started. See docstring and
1543
# comments of terminal_width for details.
1544
# _terminal_size_state has 3 possible values: no_data, unchanged, and changed.
1545
_terminal_size_state = 'no_data'
1546
_first_terminal_size = None
1549
def terminal_width():
1550
"""Return terminal width.
1552
None is returned if the width can't established precisely.
1555
- if BRZ_COLUMNS is set, returns its value
1556
- if there is no controlling terminal, returns None
1557
- query the OS, if the queried size has changed since the last query,
1559
- if COLUMNS is set, returns its value,
1560
- if the OS has a value (even though it's never changed), return its value.
1562
From there, we need to query the OS to get the size of the controlling
1565
On Unices we query the OS by:
1566
- get termios.TIOCGWINSZ
1567
- if an error occurs or a negative value is obtained, returns None
1569
On Windows we query the OS by:
1570
- win32utils.get_console_size() decides,
1571
- returns None on error (provided default value)
1573
# Note to implementors: if changing the rules for determining the width,
1574
# make sure you've considered the behaviour in these cases:
1575
# - M-x shell in emacs, where $COLUMNS is set and TIOCGWINSZ returns 0,0.
1576
# - brz log | less, in bash, where $COLUMNS not set and TIOCGWINSZ returns
1578
# - (add more interesting cases here, if you find any)
1579
# Some programs implement "Use $COLUMNS (if set) until SIGWINCH occurs",
1580
# but we don't want to register a signal handler because it is impossible
1581
# to do so without risking EINTR errors in Python <= 2.6.5 (see
1582
# <http://bugs.python.org/issue8354>). Instead we check TIOCGWINSZ every
1583
# time so we can notice if the reported size has changed, which should have
1586
# If BRZ_COLUMNS is set, take it, user is always right
1587
# Except if they specified 0 in which case, impose no limit here
1589
width = int(os.environ['BRZ_COLUMNS'])
1590
except (KeyError, ValueError):
1592
if width is not None:
1598
isatty = getattr(sys.stdout, 'isatty', None)
1599
if isatty is None or not isatty():
1600
# Don't guess, setting BRZ_COLUMNS is the recommended way to override.
1604
width, height = os_size = _terminal_size(None, None)
1605
global _first_terminal_size, _terminal_size_state
1606
if _terminal_size_state == 'no_data':
1607
_first_terminal_size = os_size
1608
_terminal_size_state = 'unchanged'
1609
elif (_terminal_size_state == 'unchanged' and
1610
_first_terminal_size != os_size):
1611
_terminal_size_state = 'changed'
1613
# If the OS claims to know how wide the terminal is, and this value has
1614
# ever changed, use that.
1615
if _terminal_size_state == 'changed':
1616
if width is not None and width > 0:
1619
# If COLUMNS is set, use it.
1621
return int(os.environ['COLUMNS'])
1622
except (KeyError, ValueError):
1625
# Finally, use an unchanged size from the OS, if we have one.
1626
if _terminal_size_state == 'unchanged':
1627
if width is not None and width > 0:
1630
# The width could not be determined.
1634
def _win32_terminal_size(width, height):
1635
width, height = win32utils.get_console_size(
1636
defaultx=width, defaulty=height)
1637
return width, height
1640
def _ioctl_terminal_size(width, height):
1645
s = struct.pack('HHHH', 0, 0, 0, 0)
1646
x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
1647
height, width = struct.unpack('HHHH', x)[0:2]
1648
except (IOError, AttributeError):
1650
return width, height
1653
_terminal_size = None
1654
"""Returns the terminal size as (width, height).
1656
:param width: Default value for width.
1657
:param height: Default value for height.
1659
This is defined specifically for each OS and query the size of the controlling
1660
terminal. If any error occurs, the provided default values should be returned.
1662
if sys.platform == 'win32':
1663
_terminal_size = _win32_terminal_size
1665
_terminal_size = _ioctl_terminal_size
1668
def supports_executable():
1669
return sys.platform != "win32"
1672
def supports_posix_readonly():
1673
"""Return True if 'readonly' has POSIX semantics, False otherwise.
1675
Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
1676
directory controls creation/deletion, etc.
1678
And under win32, readonly means that the directory itself cannot be
1679
deleted. The contents of a readonly directory can be changed, unlike POSIX
1680
where files in readonly directories cannot be added, deleted or renamed.
1682
return sys.platform != "win32"
1685
def set_or_unset_env(env_variable, value):
1686
"""Modify the environment, setting or removing the env_variable.
1688
:param env_variable: The environment variable in question
1689
:param value: The value to set the environment to. If None, then
1690
the variable will be removed.
1691
:return: The original value of the environment variable.
1693
orig_val = os.environ.get(env_variable)
1695
if orig_val is not None:
1696
del os.environ[env_variable]
1698
if not PY3 and isinstance(value, text_type):
1699
value = value.encode(get_user_encoding())
1700
os.environ[env_variable] = value
1704
_validWin32PathRE = re.compile(r'^([A-Za-z]:[/\\])?[^:<>*"?\|]*$')
1707
def check_legal_path(path):
1708
"""Check whether the supplied path is legal.
1709
This is only required on Windows, so we don't test on other platforms
1712
if sys.platform != "win32":
1714
if _validWin32PathRE.match(path) is None:
1715
raise errors.IllegalPath(path)
1718
_WIN32_ERROR_DIRECTORY = 267 # Similar to errno.ENOTDIR
1721
def _is_error_enotdir(e):
1722
"""Check if this exception represents ENOTDIR.
1724
Unfortunately, python is very inconsistent about the exception
1725
here. The cases are:
1726
1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
1727
2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
1728
which is the windows error code.
1729
3) Windows, Python2.5 uses errno == EINVAL and
1730
winerror == ERROR_DIRECTORY
1732
:param e: An Exception object (expected to be OSError with an errno
1733
attribute, but we should be able to cope with anything)
1734
:return: True if this represents an ENOTDIR error. False otherwise.
1736
en = getattr(e, 'errno', None)
1737
if (en == errno.ENOTDIR or
1738
(sys.platform == 'win32' and
1739
(en == _WIN32_ERROR_DIRECTORY or
1741
and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
1747
def walkdirs(top, prefix=""):
1748
"""Yield data about all the directories in a tree.
1750
This yields all the data about the contents of a directory at a time.
1751
After each directory has been yielded, if the caller has mutated the list
1752
to exclude some directories, they are then not descended into.
1754
The data yielded is of the form:
1755
((directory-relpath, directory-path-from-top),
1756
[(relpath, basename, kind, lstat, path-from-top), ...]),
1757
- directory-relpath is the relative path of the directory being returned
1758
with respect to top. prefix is prepended to this.
1759
- directory-path-from-root is the path including top for this directory.
1760
It is suitable for use with os functions.
1761
- relpath is the relative path within the subtree being walked.
1762
- basename is the basename of the path
1763
- kind is the kind of the file now. If unknown then the file is not
1764
present within the tree - but it may be recorded as versioned. See
1766
- lstat is the stat data *if* the file was statted.
1767
- planned, not implemented:
1768
path_from_tree_root is the path from the root of the tree.
1770
:param prefix: Prefix the relpaths that are yielded with 'prefix'. This
1771
allows one to walk a subtree but get paths that are relative to a tree
1773
:return: an iterator over the dirs.
1775
# TODO there is a bit of a smell where the results of the directory-
1776
# summary in this, and the path from the root, may not agree
1777
# depending on top and prefix - i.e. ./foo and foo as a pair leads to
1778
# potentially confusing output. We should make this more robust - but
1779
# not at a speed cost. RBC 20060731
1781
_directory = _directory_kind
1782
_listdir = os.listdir
1783
_kind_from_mode = file_kind_from_stat_mode
1784
pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
1786
# 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
1787
relroot, _, _, _, top = pending.pop()
1789
relprefix = relroot + u'/'
1792
top_slash = top + u'/'
1795
append = dirblock.append
1797
names = sorted(map(decode_filename, _listdir(top)))
1798
except OSError as e:
1799
if not _is_error_enotdir(e):
1803
abspath = top_slash + name
1804
statvalue = _lstat(abspath)
1805
kind = _kind_from_mode(statvalue.st_mode)
1806
append((relprefix + name, name, kind, statvalue, abspath))
1807
yield (relroot, top), dirblock
1809
# push the user specified dirs from dirblock
1810
pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
1813
class DirReader(object):
1814
"""An interface for reading directories."""
1816
def top_prefix_to_starting_dir(self, top, prefix=""):
1817
"""Converts top and prefix to a starting dir entry
1819
:param top: A utf8 path
1820
:param prefix: An optional utf8 path to prefix output relative paths
1822
:return: A tuple starting with prefix, and ending with the native
1825
raise NotImplementedError(self.top_prefix_to_starting_dir)
1827
def read_dir(self, prefix, top):
1828
"""Read a specific dir.
1830
:param prefix: A utf8 prefix to be preprended to the path basenames.
1831
:param top: A natively encoded path to read.
1832
:return: A list of the directories contents. Each item contains:
1833
(utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
1835
raise NotImplementedError(self.read_dir)
1838
_selected_dir_reader = None
1841
def _walkdirs_utf8(top, prefix=""):
1842
"""Yield data about all the directories in a tree.
1844
This yields the same information as walkdirs() only each entry is yielded
1845
in utf-8. On platforms which have a filesystem encoding of utf8 the paths
1846
are returned as exact byte-strings.
1848
:return: yields a tuple of (dir_info, [file_info])
1849
dir_info is (utf8_relpath, path-from-top)
1850
file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
1851
if top is an absolute path, path-from-top is also an absolute path.
1852
path-from-top might be unicode or utf8, but it is the correct path to
1853
pass to os functions to affect the file in question. (such as os.lstat)
1855
global _selected_dir_reader
1856
if _selected_dir_reader is None:
1857
if sys.platform == "win32":
1859
from ._walkdirs_win32 import Win32ReadDir
1860
_selected_dir_reader = Win32ReadDir()
1863
elif _fs_enc in ('utf-8', 'ascii'):
1865
from ._readdir_pyx import UTF8DirReader
1866
_selected_dir_reader = UTF8DirReader()
1867
except ImportError as e:
1868
failed_to_load_extension(e)
1871
if _selected_dir_reader is None:
1872
# Fallback to the python version
1873
_selected_dir_reader = UnicodeDirReader()
1875
# 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
1876
# But we don't actually uses 1-3 in pending, so set them to None
1877
pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
1878
read_dir = _selected_dir_reader.read_dir
1879
_directory = _directory_kind
1881
relroot, _, _, _, top = pending[-1].pop()
1884
dirblock = sorted(read_dir(relroot, top))
1885
yield (relroot, top), dirblock
1886
# push the user specified dirs from dirblock
1887
next = [d for d in reversed(dirblock) if d[2] == _directory]
1889
pending.append(next)
1892
class UnicodeDirReader(DirReader):
1893
"""A dir reader for non-utf8 file systems, which transcodes."""
1895
__slots__ = ['_utf8_encode']
1898
self._utf8_encode = codecs.getencoder('utf8')
1900
def top_prefix_to_starting_dir(self, top, prefix=""):
1901
"""See DirReader.top_prefix_to_starting_dir."""
1902
return (safe_utf8(prefix), None, None, None, safe_unicode(top))
1904
def read_dir(self, prefix, top):
1905
"""Read a single directory from a non-utf8 file system.
1907
top, and the abspath element in the output are unicode, all other paths
1908
are utf8. Local disk IO is done via unicode calls to listdir etc.
1910
This is currently the fallback code path when the filesystem encoding is
1911
not UTF-8. It may be better to implement an alternative so that we can
1912
safely handle paths that are not properly decodable in the current
1915
See DirReader.read_dir for details.
1917
_utf8_encode = self._utf8_encode
1919
def _fs_decode(s): return s.decode(_fs_enc)
1921
def _fs_encode(s): return s.encode(_fs_enc)
1923
_listdir = os.listdir
1924
_kind_from_mode = file_kind_from_stat_mode
1927
relprefix = prefix + b'/'
1930
top_slash = top + '/'
1933
append = dirblock.append
1934
for name_native in _listdir(top.encode('utf-8')):
1936
name = _fs_decode(name_native)
1937
except UnicodeDecodeError:
1938
raise errors.BadFilenameEncoding(
1939
relprefix + name_native, _fs_enc)
1940
name_utf8 = _utf8_encode(name)[0]
1941
abspath = top_slash + name
1942
statvalue = _lstat(abspath)
1943
kind = _kind_from_mode(statvalue.st_mode)
1944
append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
1945
return sorted(dirblock)
1948
def copy_tree(from_path, to_path, handlers={}):
1949
"""Copy all of the entries in from_path into to_path.
1951
:param from_path: The base directory to copy.
1952
:param to_path: The target directory. If it does not exist, it will
1954
:param handlers: A dictionary of functions, which takes a source and
1955
destinations for files, directories, etc.
1956
It is keyed on the file kind, such as 'directory', 'symlink', or 'file'
1957
'file', 'directory', and 'symlink' should always exist.
1958
If they are missing, they will be replaced with 'os.mkdir()',
1959
'os.readlink() + os.symlink()', and 'shutil.copy2()', respectively.
1961
# Now, just copy the existing cached tree to the new location
1962
# We use a cheap trick here.
1963
# Absolute paths are prefixed with the first parameter
1964
# relative paths are prefixed with the second.
1965
# So we can get both the source and target returned
1966
# without any extra work.
1968
def copy_dir(source, dest):
1971
def copy_link(source, dest):
1972
"""Copy the contents of a symlink"""
1973
link_to = os.readlink(source)
1974
os.symlink(link_to, dest)
1976
real_handlers = {'file': shutil.copy2,
1977
'symlink': copy_link,
1978
'directory': copy_dir,
1980
real_handlers.update(handlers)
1982
if not os.path.exists(to_path):
1983
real_handlers['directory'](from_path, to_path)
1985
for dir_info, entries in walkdirs(from_path, prefix=to_path):
1986
for relpath, name, kind, st, abspath in entries:
1987
real_handlers[kind](abspath, relpath)
1990
def copy_ownership_from_path(dst, src=None):
1991
"""Copy usr/grp ownership from src file/dir to dst file/dir.
1993
If src is None, the containing directory is used as source. If chown
1994
fails, the error is ignored and a warning is printed.
1996
chown = getattr(os, 'chown', None)
2001
src = os.path.dirname(dst)
2007
chown(dst, s.st_uid, s.st_gid)
2010
'Unable to copy ownership from "%s" to "%s". '
2011
'You may want to set it manually.', src, dst)
2012
trace.log_exception_quietly()
2015
def path_prefix_key(path):
2016
"""Generate a prefix-order path key for path.
2018
This can be used to sort paths in the same way that walkdirs does.
2020
return (dirname(path), path)
2023
def compare_paths_prefix_order(path_a, path_b):
2024
"""Compare path_a and path_b to generate the same order walkdirs uses."""
2025
key_a = path_prefix_key(path_a)
2026
key_b = path_prefix_key(path_b)
2027
return (key_a > key_b) - (key_a < key_b)
2030
_cached_user_encoding = None
2033
def get_user_encoding():
2034
"""Find out what the preferred user encoding is.
2036
This is generally the encoding that is used for command line parameters
2037
and file contents. This may be different from the terminal encoding
2038
or the filesystem encoding.
2040
:return: A string defining the preferred user encoding
2042
global _cached_user_encoding
2043
if _cached_user_encoding is not None:
2044
return _cached_user_encoding
2046
if os.name == 'posix' and getattr(locale, 'CODESET', None) is not None:
2047
# Use the existing locale settings and call nl_langinfo directly
2048
# rather than going through getpreferredencoding. This avoids
2049
# <http://bugs.python.org/issue6202> on OSX Python 2.6 and the
2050
# possibility of the setlocale call throwing an error.
2051
user_encoding = locale.nl_langinfo(locale.CODESET)
2053
# GZ 2011-12-19: On windows could call GetACP directly instead.
2054
user_encoding = locale.getpreferredencoding(False)
2057
user_encoding = codecs.lookup(user_encoding).name
2059
if user_encoding not in ("", "cp0"):
2060
sys.stderr.write('brz: warning:'
2061
' unknown encoding %s.'
2062
' Continuing with ascii encoding.\n'
2065
user_encoding = 'ascii'
2067
# Get 'ascii' when setlocale has not been called or LANG=C or unset.
2068
if user_encoding == 'ascii':
2069
if sys.platform == 'darwin':
2070
# OSX is special-cased in Python to have a UTF-8 filesystem
2071
# encoding and previously had LANG set here if not present.
2072
user_encoding = 'utf-8'
2073
# GZ 2011-12-19: Maybe UTF-8 should be the default in this case
2074
# for some other posix platforms as well.
2076
_cached_user_encoding = user_encoding
2077
return user_encoding
2080
def get_diff_header_encoding():
2081
return get_terminal_encoding()
2084
def get_host_name():
2085
"""Return the current unicode host name.
2087
This is meant to be used in place of socket.gethostname() because that
2088
behaves inconsistently on different platforms.
2090
if sys.platform == "win32":
2091
return win32utils.get_host_name()
2095
return socket.gethostname()
2096
return socket.gethostname().decode(get_user_encoding())
2099
# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
# data at once.
MAX_SOCKET_CHUNK = 64 * 1024

_end_of_stream_errors = [errno.ECONNRESET, errno.EPIPE, errno.EINVAL]
for _eno in ['WSAECONNRESET', 'WSAECONNABORTED']:
    _eno = getattr(errno, _eno, None)
    if _eno is not None:
        _end_of_stream_errors.append(_eno)
del _eno


def read_bytes_from_socket(sock, report_activity=None,
                           max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size of bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while True:
        try:
            data = sock.recv(max_read_size)
        except socket.error as e:
            eno = e.args[0]
            if eno in _end_of_stream_errors:
                # The connection was closed by the other side.  Callers expect
                # an empty string to signal end-of-stream.
                return b""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(data), 'read')
            return data


def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer.  MSG_WAITALL is not available
    on all platforms, but this should work everywhere.  This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = b''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == b'':
            break  # eof
        b += new
    return b


def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    """
    sent_total = 0
    byte_count = len(bytes)
    view = memoryview(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(view[sent_total:sent_total + MAX_SOCKET_CHUNK])
        except (socket.error, IOError) as e:
            if e.args[0] in _end_of_stream_errors:
                raise errors.ConnectionReset(
                    "Error trying to write to socket", e)
            if e.args[0] != errno.EINTR:
                raise
        else:
            if sent == 0:
                raise errors.ConnectionReset('Sending to %s returned 0 bytes'
                                             % (sock,))
            sent_total += sent
            if report_activity is not None:
                report_activity(sent, 'write')


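# Illustrative sketch of send_all()/recv_all() over a local socket pair
# (POSIX-only example, since socket.socketpair() is assumed available):
#
#   import socket
#   a, b = socket.socketpair()
#   send_all(a, b'hello world')          # retries on EINTR, chunks large sends
#   assert recv_all(b, 11) == b'hello world'

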
def connect_socket(address):
    # Slight variation of the socket.create_connection() function (provided by
    # python-2.6) that can fail if getaddrinfo returns an empty list. We also
    # provide it for previous python versions. Also, we don't use the timeout
    # parameter (provided by the python implementation) so we don't implement
    # it.
    err = socket.error('getaddrinfo returns an empty list')
    host, port = address
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.connect(sa)
            return sock

        except socket.error as e:
            err = e
            # 'err' is now the most recent error
            if sock is not None:
                sock.close()
    raise err


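# Illustrative sketch of connect_socket(); host and port are hypothetical.
# Every getaddrinfo() result (IPv6 and IPv4) is tried in turn, and the last
# socket.error is raised if none of them connects.
#
#   sock = connect_socket(('example.com', 80))
#   try:
#       send_all(sock, b'HEAD / HTTP/1.0\r\n\r\n')
#   finally:
#       sock.close()

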
def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced.  But the file itself is not
    dereferenced.
    :param path: The original path.  May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)


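# Illustrative sketch of dereference_path(), assuming /tmp/link is a symlink
# to /srv/data and /tmp itself is not a symlink:
#
#   dereference_path('/tmp/link/file')  # -> '/srv/data/file' (parents resolved)
#   dereference_path('/tmp/link')       # -> '/tmp/link' (final element kept)

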
def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"


def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with breezy are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can be deprecated.
    """
    # Check package name is within breezy
    if package == "breezy":
        resource_relpath = resource_name
    elif package.startswith("breezy."):
        package = package[len("breezy."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in breezy' % package)

    # Map the resource to a file and read its contents
    base = dirname(breezy.__file__)
    if getattr(sys, 'frozen', None):    # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    with open(pathjoin(base, resource_relpath), "rt") as f:
        return f.read()


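# Illustrative sketch of resource_string(); the resource names below are
# hypothetical and are resolved relative to the installed package directory:
#
#   text = resource_string('breezy', 'example.txt')
#   text = resource_string('breezy.plugins', 'example/README')

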
def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from ._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from ._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)


file_kind_from_stat_mode = file_kind_from_stat_mode_thunk


def file_stat(f, _lstat=os.lstat):
    try:
        return _lstat(f)
    except OSError as e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise


def file_kind(f, _lstat=os.lstat):
    stat_value = file_stat(f, _lstat)
    return file_kind_from_stat_mode(stat_value.st_mode)


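# Illustrative sketch of file_stat()/file_kind() on a typical POSIX system:
#
#   file_kind('/etc')           # -> 'directory'
#   file_kind('/etc/hostname')  # -> 'file'
#   file_kind('/no/such/path')  # raises errors.NoSuchFile

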
def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur.  This is typically only true for low-level operations
    like os.read.  If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR.  There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal).  So this function can reduce the impact for IO that breezy
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError) as e:
            if e.errno == errno.EINTR:
                continue
            raise


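# Illustrative sketch of until_no_eintr(); 'fd' is a hypothetical descriptor.
# Only wrap low-level calls that are safe to repeat verbatim:
#
#   data = until_no_eintr(os.read, fd, 4096)

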
if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty
        import termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch


if sys.platform.startswith('linux'):
    def _local_concurrency():
        try:
            return os.sysconf('SC_NPROCESSORS_ONLN')
        except (ValueError, OSError, AttributeError):
            return None
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif "bsd" in sys.platform:
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p', ],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        return None


_cached_local_concurrency = None
2369
def local_concurrency(use_cache=True):
2370
"""Return how many processes can be run concurrently.
2372
Rely on platform specific implementations and default to 1 (one) if
2373
anything goes wrong.
2375
global _cached_local_concurrency
2377
if _cached_local_concurrency is not None and use_cache:
2378
return _cached_local_concurrency
2380
concurrency = os.environ.get('BRZ_CONCURRENCY', None)
2381
if concurrency is None:
2382
import multiprocessing
2384
concurrency = multiprocessing.cpu_count()
2385
except NotImplementedError:
2386
# multiprocessing.cpu_count() isn't implemented on all platforms
2388
concurrency = _local_concurrency()
2389
except (OSError, IOError):
2392
concurrency = int(concurrency)
2393
except (TypeError, ValueError):
2396
_cached_local_concurrency = concurrency
2400
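# Illustrative sketch of local_concurrency(); setting BRZ_CONCURRENCY=2 in the
# environment overrides the detected CPU count:
#
#   workers = local_concurrency()   # falls back to 1 if detection fails

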
class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if isinstance(object, str):
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)


if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes.  Deleting or renaming a closed file opened with this
        function does not block child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for the flags for each mode.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else:  # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)
else:
    open_file = open


def available_backup_name(base, exists):
    """Find a non-existing backup file name.

    This will *not* create anything, this only returns a 'free' entry.  This
    should be used for checking names in a directory below a locked
    tree/branch/repo to avoid race conditions. This is LBYL (Look Before You
    Leap) and generally discouraged.

    :param base: The base name.

    :param exists: A callable returning True if the path parameter exists.
    """
    counter = 1
    name = "%s.~%d~" % (base, counter)
    while exists(name):
        counter += 1
        name = "%s.~%d~" % (base, counter)
    return name


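# Illustrative sketch of available_backup_name(); the exists callable lets the
# check run against the OS, a transport, or an in-memory set:
#
#   backup = available_backup_name('foo.conf', os.path.exists)
#   # -> 'foo.conf.~1~', or 'foo.conf.~2~' if that name is already taken, ...

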
def set_fd_cloexec(fd):
    """Set a Unix file descriptor's FD_CLOEXEC flag.  Do nothing if platform
    support for this is not available.
    """
    try:
        import fcntl
        old = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old | fcntl.FD_CLOEXEC)
    except (ImportError, AttributeError):
        # Either the fcntl module or specific constants are not present
        pass


def find_executable_on_path(name):
    """Finds an executable on the PATH.

    On Windows, this will try to append each extension in the PATHEXT
    environment variable to the name, if it cannot be found with the name
    as given.

    :param name: The base name of the executable.
    :return: The path to the executable found or None.
    """
    if sys.platform == 'win32':
        exts = os.environ.get('PATHEXT', '').split(os.pathsep)
        exts = [ext.lower() for ext in exts]
        base, ext = os.path.splitext(name)
        if ext != '':
            if ext.lower() not in exts:
                return None
            name = base
            exts = [ext.lower()]
    else:
        exts = ['']
    path = os.environ.get('PATH')
    if path is not None:
        path = path.split(os.pathsep)
        for ext in exts:
            for d in path:
                f = os.path.join(d, name) + ext
                if os.access(f, os.X_OK):
                    return f
    if sys.platform == 'win32':
        app_path = win32utils.get_app_path(name)
        if app_path != name:
            return app_path
    return None


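# Illustrative sketch of find_executable_on_path(); on Windows the PATHEXT
# extensions (.exe, .bat, ...) are appended automatically:
#
#   find_executable_on_path('python3')   # e.g. '/usr/bin/python3', or None

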
def _posix_is_local_pid_dead(pid):
    """True if pid doesn't correspond to live process on this machine"""
    try:
        # Special meaning of unix kill: just check if it's there.
        os.kill(pid, 0)
    except OSError as e:
        if e.errno == errno.ESRCH:
            # On this machine, and really not found: as sure as we can be
            # that it's dead.
            return True
        elif e.errno == errno.EPERM:
            # exists, though not ours
            return False
        else:
            trace.mutter("os.kill(%d, 0) failed: %s" % (pid, e))
            # Don't really know.
            return False
    else:
        # Exists and our process: not dead.
        return False


if sys.platform == "win32":
    is_local_pid_dead = win32utils.is_local_pid_dead
else:
    is_local_pid_dead = _posix_is_local_pid_dead


_maybe_ignored = ['EAGAIN', 'EINTR', 'ENOTSUP', 'EOPNOTSUPP', 'EACCES']
_fdatasync_ignored = [getattr(errno, name) for name in _maybe_ignored
                      if getattr(errno, name, None) is not None]


def fdatasync(fileno):
    """Flush file contents to disk if possible.

    :param fileno: Integer OS file handle.
    :raises TransportNotPossible: If flushing to disk is not possible.
    """
    fn = getattr(os, 'fdatasync', getattr(os, 'fsync', None))
    if fn is not None:
        try:
            fn(fileno)
        except IOError as e:
            # See bug #1075108, on some platforms fdatasync exists, but can
            # raise ENOTSUP. However, we are calling fdatasync to be helpful
            # and reduce the chance of corruption-on-powerloss situations. It
            # is not a mandatory call, so it is ok to suppress failures.
            trace.mutter("ignoring error calling fdatasync: %s" % (e,))
            if getattr(e, 'errno', None) not in _fdatasync_ignored:
                raise


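# Illustrative sketch of fdatasync(); the file name is hypothetical.  Errors
# like ENOTSUP from filesystems that cannot sync are logged and ignored:
#
#   with open('example.tmp', 'wb') as f:
#       f.write(b'payload')
#       f.flush()
#       fdatasync(f.fileno())

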
def ensure_empty_directory_exists(path, exception_class):
    """Make sure a local directory exists and is empty.

    If it does not exist, it is created.  If it exists and is not empty, an
    instance of exception_class is raised.
    """
    try:
        os.mkdir(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        if os.listdir(path) != []:
            raise exception_class(path)


def is_environment_error(evalue):
    """True if the exception instance is due to a process environment issue

    This includes OSError and IOError, but also other errors that come from
    the operating system or core libraries but are not subclasses of those.
    """
    if isinstance(evalue, (EnvironmentError, select.error)):
        return True
    if sys.platform == "win32" and win32utils._is_pywintypes_error(evalue):
        return True
    return False