# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from .lazy_import import lazy_import
lazy_import(globals(), """
from datetime import datetime

# We need to import both shutil and rmtree as we export the latter on posix
# and need the former on windows
from shutil import rmtree

# We need to import both tempfile and mkdtemp as we export the latter on posix
# and need the former on windows
from tempfile import mkdtemp

from breezy.i18n import gettext


# Cross platform wall-clock time functionality with decent resolution.
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
# synchronized with ``time.time()``, this is only meant to be used to find
# delta times by subtracting from another call to this function.
timer_func = time.time
if sys.platform == 'win32':
    timer_func = time.clock

# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)


class UnsupportedTimezoneFormat(errors.BzrError):

    _fmt = ('Unsupported timezone format "%(timezone)s", '
            'options are "utc", "original", "local".')

    def __init__(self, timezone):
        self.timezone = timezone


def get_unicode_argv():
        user_encoding = get_user_encoding()
        return [a.decode(user_encoding) for a in sys.argv[1:]]
    except UnicodeDecodeError:
        raise errors.BzrError(gettext("Parameter {0!r} encoding is unsupported by {1} "
                                      "application locale.").format(a, user_encoding))


def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        chmod_if_possible(filename, mod)


def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        chmod_if_possible(filename, mod)


def chmod_if_possible(filename, mode):
    # Set file mode if that can be safely done.
    # Sometimes even on unix the filesystem won't allow it - see
    # https://bugs.launchpad.net/bzr/+bug/606537
        # It is probably faster to just do the chmod, rather than
        # doing a stat, and then trying to compare
        os.chmod(filename, mode)
    except (IOError, OSError) as e:
        # Permission/access denied seems to commonly happen on smbfs; there's
        # probably no point warning about it.
        # <https://bugs.launchpad.net/bzr/+bug/606537>
        if getattr(e, 'errno') in (errno.EPERM, errno.EACCES):
            trace.mutter("ignore error on chmod of %r: %r" % (


def minimum_path_selection(paths):
    """Return the smallest subset of paths which are outside paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.
    """
        if isinstance(path, bytes):
            return path.split(b'/')
            return path.split('/')
    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
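
# Illustrative usage of minimum_path_selection (added as an example, not a
# doctest; the literal paths are made up). Nested paths collapse onto their
# outermost ancestor:
#
#   >>> sorted(minimum_path_selection(['a', 'a/b', 'a/b/c', 'd']))
#   ['a', 'd']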
    """Return a quoted filename

    This previously used backslash quoting, but that works poorly on
    # TODO: I'm not really sure this is the best format either.
    if _QUOTE_RE is None:
        _QUOTE_RE = re.compile(r'([^a-zA-Z0-9.,:/\\_~-])')

    if _QUOTE_RE.search(f):


_directory_kind = 'directory'


    """Return the current umask"""
    # Assume that people aren't messing with the umask while running
    # XXX: This is not thread safe, but there is no way to get the
    # umask without setting it


    _directory_kind: "/",
    'tree-reference': '+',


def kind_marker(kind):
        return _kind_marker_map[kind]
        # Slightly faster than using .get(, '') when the common case is that


lexists = getattr(os.path, 'lexists', None)
            stat = getattr(os, 'lstat', os.stat)
            if e.errno == errno.ENOENT:
                raise errors.BzrError(
                    gettext("lstat/stat of ({0!r}): {1!r}").format(f, e))


def fancy_rename(old, new, rename_func, unlink_func):
    """A fancy rename, when you don't have atomic rename.

    :param old: The old path, to rename from
    :param new: The new path, to rename to
    :param rename_func: The potentially non-atomic rename function
    :param unlink_func: A way to delete the target file if the full rename
    """
    # sftp rename doesn't allow overwriting, so play tricks:
    base = os.path.basename(new)
    dirname = os.path.dirname(new)
    # callers use different encodings for the paths so the following MUST
    # respect that. We rely on python upcasting to unicode if new is unicode
    # and keeping a str if not.
    tmp_name = 'tmp.%s.%.9f.%d.%s' % (base, time.time(),
                                      os.getpid(), rand_chars(10))
    tmp_name = pathjoin(dirname, tmp_name)

    # Rename the file out of the way, but keep track if it didn't exist
    # We don't want to grab just any exception
    # something like EACCES should prevent us from continuing
    # The downside is that the rename_func has to throw an exception
    # with an errno = ENOENT, or NoSuchFile
        rename_func(new, tmp_name)
    except (errors.NoSuchFile,):
        # RBC 20060103 abstraction leakage: the paramiko SFTP clients rename
        # function raises an IOError with errno is None when a rename fails.
        # This then gets caught here.
        if e.errno not in (None, errno.ENOENT, errno.ENOTDIR):
    except Exception as e:
        if (getattr(e, 'errno', None) is None
                or e.errno not in (errno.ENOENT, errno.ENOTDIR)):
        # This may throw an exception, in which case success will
        rename_func(old, new)
    except (IOError, OSError) as e:
        # source and target may be aliases of each other (e.g. on a
        # case-insensitive filesystem), so we may have accidentally renamed
        # source when we tried to rename target
        if (file_existed and e.errno in (None, errno.ENOENT)
                and old.lower() == new.lower()):
            # source and target are the same file on a case-insensitive
            # filesystem, so we don't generate an exception
            # If the file used to exist, rename it back into place
            # otherwise just delete it from the tmp location
                unlink_func(tmp_name)
                rename_func(tmp_name, new)
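
# Example of wiring fancy_rename up (a sketch; the file names here are
# hypothetical). This is essentially what _win32_rename below does to get an
# overwriting rename out of a rename_func that refuses to replace an existing
# target:
#
#   fancy_rename('new-data.tmp', 'data.txt',
#                rename_func=os.rename, unlink_func=os.unlink)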

# In Python 2.4.2 and older, os.path.abspath and os.path.realpath
# choke on a Unicode string containing a relative path if
# os.getcwd() returns a non-sys.getdefaultencoding()-encoded


def _posix_abspath(path):
    # jam 20060426 rather than encoding to fsencoding
    # copy posixpath.abspath, but use os.getcwdu instead
    if not posixpath.isabs(path):
        path = posixpath.join(getcwd(), path)
    return _posix_normpath(path)


def _posix_realpath(path):
    return posixpath.realpath(path.encode(_fs_enc)).decode(_fs_enc)


def _posix_normpath(path):
    path = posixpath.normpath(path)
    # Bug 861008: posixpath.normpath() returns a path normalized according to
    # the POSIX standard, which stipulates (for compatibility reasons) that two
    # leading slashes must not be simplified to one, and only if there are 3 or
    # more should they be simplified as one. So we treat the leading 2 slashes
    # as a special case here by simply removing the first slash, as we consider
    # that breaking POSIX compatibility for this obscure feature is acceptable.
    # This is not a paranoid precaution, as we notably get paths like this when
    # the repo is hosted at the root of the filesystem, i.e. in "/".
    if path.startswith('//'):


def _posix_path_from_environ(key):
    """Get unicode path from `key` in environment or None if not present

    Note that posix systems use arbitrary byte strings for filesystem objects,
    so a path that raises BadFilenameEncoding here may still be accessible.
    """
    val = os.environ.get(key, None)
    if PY3 or val is None:
        return val.decode(_fs_enc)
    except UnicodeDecodeError:
        # GZ 2011-12-12: Ideally want to include `key` in the exception message
        raise errors.BadFilenameEncoding(val, _fs_enc)


def _posix_get_home_dir():
    """Get the home directory of the current user as a unicode path"""
    path = posixpath.expanduser("~")
        return path.decode(_fs_enc)
    except AttributeError:
    except UnicodeDecodeError:
        raise errors.BadFilenameEncoding(path, _fs_enc)


def _posix_getuser_unicode():
    """Get username from environment or password database as unicode"""
    name = getpass.getuser()
    user_encoding = get_user_encoding()
        return name.decode(user_encoding)
    except UnicodeDecodeError:
        raise errors.BzrError("Encoding of username %r is unsupported by %s "
                              "application locale." % (name, user_encoding))


def _win32_fixdrive(path):
    """Force drive letters to be consistent.

    win32 is inconsistent whether it returns lower or upper case
    and even if it was consistent the user might type the other
    so we force it to uppercase
    running python.exe under cmd.exe returns capital C:\\
    running win32 python inside a cygwin shell returns lowercase c:\\
    """
    drive, path = ntpath.splitdrive(path)
    return drive.upper() + path


def _win32_abspath(path):
    # Real ntpath.abspath doesn't have a problem with a unicode cwd
    return _win32_fixdrive(ntpath.abspath(path).replace('\\', '/'))


def _win32_realpath(path):
    # Real ntpath.realpath doesn't have a problem with a unicode cwd
    return _win32_fixdrive(ntpath.realpath(path).replace('\\', '/'))


def _win32_pathjoin(*args):
    return ntpath.join(*args).replace('\\', '/')


def _win32_normpath(path):
    return _win32_fixdrive(ntpath.normpath(path).replace('\\', '/'))


    return _win32_fixdrive(_getcwd().replace('\\', '/'))


def _win32_mkdtemp(*args, **kwargs):
    return _win32_fixdrive(tempfile.mkdtemp(*args, **kwargs).replace('\\', '/'))


def _win32_rename(old, new):
    """We expect to be able to atomically replace 'new' with old.

    On win32, if new exists, it must be moved out of the way first,
    """
        fancy_rename(old, new, rename_func=os.rename, unlink_func=os.unlink)
        if e.errno in (errno.EPERM, errno.EACCES, errno.EBUSY, errno.EINVAL):
            # If we try to rename a non-existent file onto cwd, we get
            # EPERM or EACCES instead of ENOENT, this will raise ENOENT
            # if the old path doesn't exist, sometimes we get EACCES
            # On Linux, we seem to get EBUSY, on Mac we get EINVAL


    return unicodedata.normalize('NFC', _getcwd())


def _rename_wrap_exception(rename_func):
    """Adds extra information to any exceptions that come from rename().

    The exception has an updated message and 'old_filename' and 'new_filename'
    """

    def _rename_wrapper(old, new):
            rename_func(old, new)
            detailed_error = OSError(e.errno, e.strerror +
                                     " [occurred when renaming '%s' to '%s']" %
            detailed_error.old_filename = old
            detailed_error.new_filename = new

    return _rename_wrapper


if sys.version_info > (3,):

# Default rename wraps os.rename()
rename = _rename_wrap_exception(os.rename)

# Default is to just use the python builtins, but these can be rebound on
# particular platforms.
abspath = _posix_abspath
realpath = _posix_realpath
pathjoin = os.path.join
normpath = _posix_normpath
path_from_environ = _posix_path_from_environ
_get_home_dir = _posix_get_home_dir
getuser_unicode = _posix_getuser_unicode
dirname = os.path.dirname
basename = os.path.basename
split = os.path.split
splitext = os.path.splitext
# These were already lazily imported into local scope
# mkdtemp = tempfile.mkdtemp
# rmtree = shutil.rmtree

MIN_ABS_PATHLENGTH = 1


if sys.platform == 'win32':
    abspath = _win32_abspath
    realpath = _win32_realpath
    pathjoin = _win32_pathjoin
    normpath = _win32_normpath
    getcwd = _win32_getcwd
    mkdtemp = _win32_mkdtemp
    rename = _rename_wrap_exception(_win32_rename)
        from . import _walkdirs_win32
        lstat = _walkdirs_win32.lstat
        fstat = _walkdirs_win32.fstat
        wrap_stat = _walkdirs_win32.wrap_stat

    MIN_ABS_PATHLENGTH = 3

    def _win32_delete_readonly(function, path, excinfo):
        """Error handler for shutil.rmtree function [for win32]
        Helps to remove files and dirs marked as read-only.
        """
        exception = excinfo[1]
        if function in (os.remove, os.rmdir) \
                and isinstance(exception, OSError) \
                and exception.errno == errno.EACCES:

    def rmtree(path, ignore_errors=False, onerror=_win32_delete_readonly):
        """Replacer for shutil.rmtree: could remove readonly dirs/files"""
        return shutil.rmtree(path, ignore_errors, onerror)

    f = win32utils.get_unicode_argv  # special function or None
    path_from_environ = win32utils.get_environ_unicode
    _get_home_dir = win32utils.get_home_location
    getuser_unicode = win32utils.get_user_name

elif sys.platform == 'darwin':


def get_terminal_encoding(trace=False):
    """Find the best encoding for printing to the screen.

    This attempts to check both sys.stdout and sys.stdin to see
    what encoding they are in, and if that fails it falls back to
    osutils.get_user_encoding().
    The problem is that on Windows, locale.getpreferredencoding()
    is not the same encoding as that used by the console:
    http://mail.python.org/pipermail/python-list/2003-May/162357.html

    On my standard US Windows XP, the preferred encoding is
    cp1252, but the console is cp437

    :param trace: If True trace the selected encoding via mutter().
    """
    from .trace import mutter
    output_encoding = getattr(sys.stdout, 'encoding', None)
    if not output_encoding:
        input_encoding = getattr(sys.stdin, 'encoding', None)
        if not input_encoding:
            output_encoding = get_user_encoding()
                mutter('encoding stdout as osutils.get_user_encoding() %r',
            output_encoding = input_encoding
                mutter('encoding stdout as sys.stdin encoding %r',
            mutter('encoding stdout as sys.stdout encoding %r', output_encoding)
    if output_encoding == 'cp0':
        # invalid encoding (cp0 means 'no codepage' on Windows)
        output_encoding = get_user_encoding()
            mutter('cp0 is invalid encoding.'
                   ' encoding stdout as osutils.get_user_encoding() %r',
        codecs.lookup(output_encoding)
        sys.stderr.write('brz: warning:'
                         ' unknown terminal encoding %s.\n'
                         ' Using encoding %s instead.\n'
                         % (output_encoding, get_user_encoding())
        output_encoding = get_user_encoding()

    return output_encoding


def normalizepath(f):
    if getattr(os.path, 'realpath', None) is not None:
    [p, e] = os.path.split(f)
    if e == "" or e == "." or e == "..":
        return pathjoin(F(p), e)


    """True if f is an accessible directory."""
        return stat.S_ISDIR(os.lstat(f)[stat.ST_MODE])


    """True if f is a regular file."""
        return stat.S_ISREG(os.lstat(f)[stat.ST_MODE])


    """True if f is a symlink."""
        return stat.S_ISLNK(os.lstat(f)[stat.ST_MODE])


def is_inside(dir, fname):
    """True if fname is inside dir.

    The parameters should typically be passed to osutils.normpath first, so
    that . and .. and repeated slashes are eliminated, and the separators
    are canonical for the platform.

    The empty string as a dir name is taken as top-of-tree and matches
    """
    # XXX: Most callers of this can actually do something smarter by
    # looking at the inventory
    if isinstance(dir, bytes):
        if not dir.endswith(b'/'):
        if not dir.endswith('/'):

    return fname.startswith(dir)


def is_inside_any(dir_list, fname):
    """True if fname is inside any of given dirs."""
    for dirname in dir_list:
        if is_inside(dirname, fname):


def is_inside_or_parent_of_any(dir_list, fname):
    """True if fname is a child or a parent of any of the given files."""
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):
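
# Illustrative behaviour of is_inside (example comments, not doctests; the
# paths are made up). Note that plain string-prefix matching would not be
# enough:
#
#   >>> is_inside('src', 'src/foo.c')
#   True
#   >>> is_inside('src', 'srccontrol')
#   False
#   >>> is_inside('', 'anything/at/all')
#   True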

def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.
    """
        # read specified number of bytes

        while read_length > 0:
            num_bytes_to_read = min(read_length, buff_size)
            block = from_file.read(num_bytes_to_read)
            if report_activity is not None:
                report_activity(len(block), direction)

            actual_bytes_read = len(block)
            read_length -= actual_bytes_read
            length += actual_bytes_read
            block = from_file.read(buff_size)
            if report_activity is not None:
                report_activity(len(block), direction)
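
# Illustrative use of pumpfile (an example sketch using in-memory files):
#
#   >>> from io import BytesIO
#   >>> src, dst = BytesIO(b'some data'), BytesIO()
#   >>> pumpfile(src, dst, buff_size=4)
#   9
#   >>> dst.getvalue()
#   b'some data'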

def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.
    """
    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
        segment_size = 5242880  # 5MB
    offsets = range(0, len(bytes), segment_size)
    view = memoryview(bytes)
    write = file_handle.write
    for offset in offsets:
        write(view[offset:offset + segment_size])


def file_iterator(input_file, readsize=32768):
        b = input_file.read(readsize)


# GZ 2017-09-16: Makes sense in general for hexdigest() result to be text, but
# used as bytes through most interfaces so encode with this wrapper.
    def _hexdigest(hashobj):
        return hashobj.hexdigest().encode()
    def _hexdigest(hashobj):
        return hashobj.hexdigest()


    """Calculate the hexdigest of an open file.

    The file cursor should be already at the start.


def size_sha_file(f):
    """Calculate the size and hexdigest of an open file.

    The file cursor should be already at the start and
    the caller is responsible for closing the file afterwards.
    return size, _hexdigest(s)


def sha_file_by_name(fname):
    """Calculate the SHA1 of a file by reading the full text"""
    f = os.open(fname, os.O_RDONLY | O_BINARY | O_NOINHERIT)
            b = os.read(f, 1 << 16)


def sha_strings(strings, _factory=sha):
    """Return the sha-1 of concatenation of strings"""
    for string in strings:


def sha_string(f, _factory=sha):
    # GZ 2017-09-16: Dodgy if factory is ever not sha, probably shouldn't be.
    return _hexdigest(_factory(f))


def fingerprint_file(f):
    return {'size': len(b),
            'sha1': _hexdigest(sha(b))}


def compare_files(a, b):
    """Returns true if equal in contents"""


def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds


weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]


def format_date(t, offset=0, timezone='original', date_fmt=None,
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str


# Cache of formatted offset strings


def format_date_with_offset_in_original_timezone(t, offset=0,
                                                 _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    """
    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str


def format_local_date(t, offset=0, timezone='original', date_fmt=None,
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, text_type):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str


def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
    elif timezone == 'original':
        tt = time.gmtime(t + offset)
    elif timezone == 'local':
        tt = time.localtime(t)
        offset = local_time_offset(t)
        raise UnsupportedTimezoneFormat(timezone)
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    return (date_fmt, tt, offset_str)
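
# Example of the formatting performed by format_date (illustrative, not a
# doctest):
#
#   >>> format_date(0, timezone='utc')
#   'Thu 1970-01-01 00:00:00 +0000'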

def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))


def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution
    """
        direction = 'in the future'

    if seconds < 90:  # print seconds up to 90 seconds
            return '%d second %s' % (seconds, direction,)
            return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if minutes < 90:  # print minutes, seconds up to 90 minutes
            return '%d minute, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)
            return '%d minutes, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
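
# Examples of format_delta output (illustrative, not doctests):
#
#   >>> format_delta(1)
#   '1 second ago'
#   >>> format_delta(120)
#   '2 minutes, 0 seconds ago'
#   >>> format_delta(-7260)
#   '2 hours, 1 minute in the future'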

    """Return size of given open file."""
    return os.fstat(f.fileno())[stat.ST_SIZE]


# Alias os.urandom to support platforms (which?) without /dev/urandom and
# override if it doesn't work. Avoid checking on windows where there is
# significant initialisation cost that can be avoided for some bzr calls.

rand_bytes = os.urandom

if rand_bytes.__module__ != "nt":
    except NotImplementedError:
        # not well seeded, but better than nothing
                s += chr(random.randint(0, 255))


ALNUM = '0123456789abcdefghijklmnopqrstuvwxyz'


def rand_chars(num):
    """Return a random string of num alphanumeric characters

    The result only contains lowercase chars because it may be used on
    case-insensitive filesystems.
    """
    for raw_byte in rand_bytes(num):
            s += ALNUM[ord(raw_byte) % 36]
            s += ALNUM[raw_byte % 36]
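
# Illustrative use of rand_chars (an example; the exact output is random):
#
#   >>> token = rand_chars(8)
#   >>> len(token), all(c in ALNUM for c in token)
#   (8, True)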

# TODO: We could later have path objects that remember their list
# decomposition (might be too tricksy though.)


    """Turn string into list of parts."""
    if os.path.sep == '\\':
        # split on either delimiter because people might use either on
        if isinstance(p, bytes):
            ps = re.split(b'[\\\\/]', p)
            ps = re.split(r'[\\/]', p)
        if isinstance(p, bytes):
        if f in ('..', b'..'):
            raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)
        elif f in ('.', '', b'.', b''):
        if (f == '..') or (f is None) or (f == ''):
            raise errors.BzrError(gettext("sorry, %r not allowed in path") % f)


def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].
    """
    parts = splitpath(dirname(filename))
        parents.append(joinpath(parts))


_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension.  If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> import breezy._fictional_extension_pyx
    >>> except ImportError as e:
    >>> breezy.osutils.failed_to_load_extension(e)
    >>> import breezy._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --
    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)


def report_extension_load_failures():
    if not _extension_load_failures:
    if config.GlobalConfig().suppress_warning('missing_extensions'):
    # the warnings framework should by default show this only once
    from .trace import warning
        "brz: warning: some compiled extensions could not be loaded; "
        "see ``brz help missing-extensions``")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529


    from ._chunks_to_lines_pyx import chunks_to_lines
except ImportError as e:
    failed_to_load_extension(e)
    from ._chunks_to_lines_py import chunks_to_lines


    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, bytes):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
        return _split_lines(s)


def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    nl = b'\n' if isinstance(s, bytes) else u'\n'
    result = [line + nl for line in lines[:-1]]
        result.append(lines[-1])
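
# Illustrative behaviour of split_lines (example comments, not doctests):
#
#   >>> split_lines(b'foo\nbar\nbaz')
#   [b'foo\n', b'bar\n', b'baz']
#   >>> split_lines(u'foo\nbar\n')
#   ['foo\n', 'bar\n']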

def hardlinks_good():
    return sys.platform not in ('win32', 'cygwin', 'darwin')


def link_or_copy(src, dest):
    """Hardlink a file, or copy it if it can't be hardlinked."""
    if not hardlinks_good():
        shutil.copyfile(src, dest)
    except (OSError, IOError) as e:
        if e.errno != errno.EXDEV:
        shutil.copyfile(src, dest)


def delete_any(path):
    """Delete a file, symlink or directory.

    Will delete even if readonly.
    """
        _delete_file_or_dir(path)
    except (OSError, IOError) as e:
        if e.errno in (errno.EPERM, errno.EACCES):
            # make writable and try again
            except (OSError, IOError):
            _delete_file_or_dir(path)


def _delete_file_or_dir(path):
    # Look Before You Leap (LBYL) is appropriate here instead of Easier to Ask for
    # Forgiveness than Permission (EAFP) because:
    # - root can damage a solaris file system by using unlink,
    # - unlink raises different exceptions on different OSes (linux: EISDIR, win32:
    #   EACCES, OSX: EPERM) when invoked on a directory.
    if isdir(path):  # Takes care of symlinks


    if getattr(os, 'symlink', None) is not None:


def has_hardlinks():
    if getattr(os, 'link', None) is not None:


def host_os_dereferences_symlinks():
    return (has_symlinks()
            and sys.platform not in ('cygwin', 'win32'))


def readlink(abspath):
    """Return a string representing the path to which the symbolic link points.

    :param abspath: The link absolute unicode path.

    This is guaranteed to return the symbolic link in unicode in all python
    """
    link = abspath.encode(_fs_enc)
    target = os.readlink(link)
    target = target.decode(_fs_enc)


def contains_whitespace(s):
    """True if there are any whitespace characters in s."""
    # string.whitespace can include '\xa0' in certain locales, because it is
    # considered "non-breaking-space" as part of ISO-8859-1. But it
    # 1) Isn't a breaking whitespace
    # 2) Isn't one of ' \t\r\n' which are characters we sometimes use as
    # 3) '\xa0' isn't unicode safe since it is >128.

    if isinstance(s, str):
        ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')


def contains_linebreaks(s):
    """True if there is any vertical whitespace in s."""


def relpath(base, path):
    """Return path relative to base, or raise PathNotChild exception.

    The path may be either an absolute path or a path relative to the
    current working directory.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem.

    NOTE: `base` should not have a trailing slash otherwise you'll get
    PathNotChild exceptions regardless of `path`.
    """
    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError(gettext('%r is too short to calculate a relative path')
        if len(head) <= len(base) and head != base:
            raise errors.PathNotChild(rp, base)
        head, tail = split(head)

    return pathjoin(*reversed(s))
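
# Illustrative use of relpath on a POSIX system (example comments, not
# doctests; the paths are made up):
#
#   >>> relpath('/srv/repo', '/srv/repo/dir/file.txt')
#   'dir/file.txt'
#
# A path outside of base raises errors.PathNotChild instead.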

def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in

    rel = relpath(base, path)
    # '.' will have been turned into ''

    abs_base = abspath(base)
    _listdir = os.listdir

    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
            next_entries = _listdir(current)
        except OSError:  # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            current = pathjoin(current, bit, *list(bit_iter))
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
    return current[len(abs_base):].lstrip('/')


# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there. For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
    canonical_relpath = relpath


def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]


def decode_filename(filename):
    """Decode the filename using the filesystem encoding

    If it is unicode, it is returned.
    Otherwise it is decoded from the filesystem's encoding. If decoding
    fails, an errors.BadFilenameEncoding exception is raised.
    """
    if isinstance(filename, text_type):
        return filename.decode(_fs_enc)
    except UnicodeDecodeError:
        raise errors.BadFilenameEncoding(filename, _fs_enc)


def safe_unicode(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string into unicode.

    If it is unicode, it is returned.
    Otherwise it is decoded from utf-8. If decoding fails, the exception is
    wrapped in a BzrBadParameterNotUnicode exception.
    """
    if isinstance(unicode_or_utf8_string, text_type):
        return unicode_or_utf8_string
        return unicode_or_utf8_string.decode('utf8')
    except UnicodeDecodeError:
        raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)


def safe_utf8(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string to a utf8 string.

    If it is a str, it is returned.
    If it is Unicode, it is encoded into a utf-8 string.
    """
    if isinstance(unicode_or_utf8_string, bytes):
        # TODO: jam 20070209 This is overkill, and probably has an impact on
        #       performance if we are dealing with lots of apis that want a
            # Make sure it is a valid utf-8 string
            unicode_or_utf8_string.decode('utf-8')
        except UnicodeDecodeError:
            raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
        return unicode_or_utf8_string
    return unicode_or_utf8_string.encode('utf-8')
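
# Illustrative behaviour of safe_unicode / safe_utf8 (example comments, not
# doctests; shown with Python 3 byte-string reprs):
#
#   >>> safe_utf8(u'caf\xe9')
#   b'caf\xc3\xa9'
#   >>> safe_unicode(b'caf\xc3\xa9') == u'caf\xe9'
#   True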

def safe_revision_id(unicode_or_utf8_string):
    """Revision ids should now be utf8, but at one point they were unicode.

    :param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
    :return: None or a utf8 revision id.
    """
    if (unicode_or_utf8_string is None
            or unicode_or_utf8_string.__class__ == bytes):
        return unicode_or_utf8_string
    raise TypeError('Unicode revision ids are no longer supported. '
                    'Revision id generators should be creating utf8 revision '


def safe_file_id(unicode_or_utf8_string):
    """File ids should now be utf8, but at one point they were unicode.

    This is the same as safe_utf8, except it uses the cached encode functions
    to save a little bit of performance.

    :param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
    :return: None or a utf8 file id.
    """
    if (unicode_or_utf8_string is None
            or unicode_or_utf8_string.__class__ == bytes):
        return unicode_or_utf8_string
    raise TypeError('Unicode file ids are no longer supported. '
                    'File id generators should be creating utf8 file ids.')


_platform_normalizes_filenames = False
if sys.platform == 'darwin':
    _platform_normalizes_filenames = True


def normalizes_filenames():
    """Return True if this platform normalizes unicode filenames.
    """
    return _platform_normalizes_filenames


def _accessible_normalized_filename(path):
    """Get the unicode normalized path, and if you can access the file.

    On platforms where the system normalizes filenames (Mac OSX),
    you can access a file by any path which will normalize correctly.
    On platforms where the system does not normalize filenames
    (everything else), you have to access a file by its exact path.

    Internally, bzr only supports NFC normalization, since that is
    the standard for XML documents.

    So return the normalized path, and a flag indicating if the file
    can be accessed by that path.
    """
    if isinstance(path, bytes):
        path = path.decode(sys.getfilesystemencoding())
    return unicodedata.normalize('NFC', path), True


def _inaccessible_normalized_filename(path):
    __doc__ = _accessible_normalized_filename.__doc__

    if isinstance(path, bytes):
        path = path.decode(sys.getfilesystemencoding())
    normalized = unicodedata.normalize('NFC', path)
    return normalized, normalized == path


if _platform_normalizes_filenames:
    normalized_filename = _accessible_normalized_filename
    normalized_filename = _inaccessible_normalized_filename


def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`).  May be ignored if the feature is not available on this
        platform or Python version.
    """
        siginterrupt = signal.siginterrupt
        # This python implementation doesn't provide signal support, hence no
    except AttributeError:
        # siginterrupt doesn't exist on this platform, or for this version
        def siginterrupt(signum, flag): return None

        def sig_handler(*args):
            # Python resets the siginterrupt flag when a signal is
            # received.  <http://bugs.python.org/issue8354>
            # As a workaround for some cases, set it back the way we want it.
            siginterrupt(signum, False)
            # Now run the handler function passed to set_signal_handler.
        sig_handler = handler
    old_handler = signal.signal(signum, sig_handler)
        siginterrupt(signum, False)


default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""

# Keep some state so that terminal_width can detect if _terminal_size has
# returned a different size since the process started.  See docstring and
# comments of terminal_width for details.
# _terminal_size_state has 3 possible values: no_data, unchanged, and changed.
_terminal_size_state = 'no_data'
_first_terminal_size = None


def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    - if BRZ_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - query the OS, if the queried size has changed since the last query,
    - if COLUMNS is set, returns its value,
    - if the OS has a value (even though it's never changed), return its value.

    From there, we need to query the OS to get the size of the controlling

    On Unices we query the OS by:
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    On Windows we query the OS by:
    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)

    # Note to implementors: if changing the rules for determining the width,
    # make sure you've considered the behaviour in these cases:
    #  - M-x shell in emacs, where $COLUMNS is set and TIOCGWINSZ returns 0,0.
    #  - brz log | less, in bash, where $COLUMNS not set and TIOCGWINSZ returns
    #  - (add more interesting cases here, if you find any)
    # Some programs implement "Use $COLUMNS (if set) until SIGWINCH occurs",
    # but we don't want to register a signal handler because it is impossible
    # to do so without risking EINTR errors in Python <= 2.6.5 (see
    # <http://bugs.python.org/issue8354>).  Instead we check TIOCGWINSZ every
    # time so we can notice if the reported size has changed, which should have

    # If BRZ_COLUMNS is set, take it, user is always right
    # Except if they specified 0 in which case, impose no limit here
        width = int(os.environ['BRZ_COLUMNS'])
    except (KeyError, ValueError):
    if width is not None:

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BRZ_COLUMNS is the recommended way to override.

    width, height = os_size = _terminal_size(None, None)
    global _first_terminal_size, _terminal_size_state
    if _terminal_size_state == 'no_data':
        _first_terminal_size = os_size
        _terminal_size_state = 'unchanged'
    elif (_terminal_size_state == 'unchanged' and
          _first_terminal_size != os_size):
        _terminal_size_state = 'changed'

    # If the OS claims to know how wide the terminal is, and this value has
    # ever changed, use that.
    if _terminal_size_state == 'changed':
        if width is not None and width > 0:

    # If COLUMNS is set, use it.
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):

    # Finally, use an unchanged size from the OS, if we have one.
    if _terminal_size_state == 'unchanged':
        if width is not None and width > 0:

    # The width could not be determined.


def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(
        defaultx=width, defaulty=height)
    return width, height


def _ioctl_terminal_size(width, height):
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
    return width, height


_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the controlling
terminal. If any error occurs, the provided default values should be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
    _terminal_size = _ioctl_terminal_size


def supports_executable():
    return sys.platform != "win32"


def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted. The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"


def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable.

    :param env_variable: The environment variable in question
    :param value: The value to set the environment to. If None, then
        the variable will be removed.
    :return: The original value of the environment variable.
    """
    orig_val = os.environ.get(env_variable)
        if orig_val is not None:
            del os.environ[env_variable]
        if not PY3 and isinstance(value, text_type):
            value = value.encode(get_user_encoding())
        os.environ[env_variable] = value


_validWin32PathRE = re.compile(r'^([A-Za-z]:[/\\])?[^:<>*"?\|]*$')


def check_legal_path(path):
    """Check whether the supplied path is legal.
    This is only required on Windows, so we don't test on other platforms
    """
    if sys.platform != "win32":
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)


_WIN32_ERROR_DIRECTORY = 267  # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR or
        (sys.platform == 'win32' and
            (en == _WIN32_ERROR_DIRECTORY or
                 and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)


def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:
    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
     - directory-relpath is the relative path of the directory being returned
       with respect to top. prefix is prepended to this.
     - directory-path-from-root is the path including top for this directory.
       It is suitable for use with os functions.
     - relpath is the relative path within the subtree being walked.
     - basename is the basename of the path
     - kind is the kind of the file now. If unknown then the file is not
       present within the tree - but it may be recorded as versioned. See
     - lstat is the stat data *if* the file was statted.
     - planned, not implemented:
       path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
    :return: an iterator over the dirs.
    """
    # TODO there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
        # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
        relroot, _, _, _, top = pending.pop()
            relprefix = relroot + u'/'
        top_slash = top + u'/'

        append = dirblock.append
            names = sorted(map(decode_filename, _listdir(top)))
        except OSError as e:
            if not _is_error_enotdir(e):
                abspath = top_slash + name
                statvalue = _lstat(abspath)
                kind = _kind_from_mode(statvalue.st_mode)
                append((relprefix + name, name, kind, statvalue, abspath))
        yield (relroot, top), dirblock

        # push the user specified dirs from dirblock
        pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
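
# Illustrative use of walkdirs (a sketch; 'some/dir' is a hypothetical path).
# Because the yielded dirblock is the same list used to schedule descents,
# pruning entries in place stops the walk from entering those directories:
#
#   for (dir_relpath, dir_abspath), entries in walkdirs('some/dir'):
#       entries[:] = [e for e in entries if e[1] != '.bzr']
#       for relpath, basename, kind, lstat, abspath in entries:
#           print(kind, relpath)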
1819
class DirReader(object):
1820
"""An interface for reading directories."""
1822
def top_prefix_to_starting_dir(self, top, prefix=""):
1823
"""Converts top and prefix to a starting dir entry
1825
:param top: A utf8 path
1826
:param prefix: An optional utf8 path to prefix output relative paths
1828
:return: A tuple starting with prefix, and ending with the native
1831
raise NotImplementedError(self.top_prefix_to_starting_dir)
1833
def read_dir(self, prefix, top):
1834
"""Read a specific dir.
1836
:param prefix: A utf8 prefix to be preprended to the path basenames.
1837
:param top: A natively encoded path to read.
1838
:return: A list of the directories contents. Each item contains:
1839
(utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
1841
raise NotImplementedError(self.read_dir)
1844
_selected_dir_reader = None
1847
def _walkdirs_utf8(top, prefix=""):
1848
"""Yield data about all the directories in a tree.
1850
This yields the same information as walkdirs() only each entry is yielded
1851
in utf-8. On platforms which have a filesystem encoding of utf8 the paths
1852
are returned as exact byte-strings.
1854
:return: yields a tuple of (dir_info, [file_info])
1855
dir_info is (utf8_relpath, path-from-top)
1856
file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
1857
if top is an absolute path, path-from-top is also an absolute path.
1858
path-from-top might be unicode or utf8, but it is the correct path to
1859
pass to os functions to affect the file in question. (such as os.lstat)
1861
global _selected_dir_reader
1862
if _selected_dir_reader is None:
1863
if sys.platform == "win32":
1865
from ._walkdirs_win32 import Win32ReadDir
1866
_selected_dir_reader = Win32ReadDir()
1869
elif _fs_enc in ('utf-8', 'ascii'):
1871
from ._readdir_pyx import UTF8DirReader
1872
_selected_dir_reader = UTF8DirReader()
1873
except ImportError as e:
1874
failed_to_load_extension(e)
1877
if _selected_dir_reader is None:
1878
# Fallback to the python version
1879
_selected_dir_reader = UnicodeDirReader()
1881
# 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
1882
# But we don't actually uses 1-3 in pending, so set them to None
1883
pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
1884
read_dir = _selected_dir_reader.read_dir
1885
_directory = _directory_kind
1887
relroot, _, _, _, top = pending[-1].pop()
1890
dirblock = sorted(read_dir(relroot, top))
1891
yield (relroot, top), dirblock
1892
# push the user specified dirs from dirblock
1893
next = [d for d in reversed(dirblock) if d[2] == _directory]
1895
pending.append(next)
1898
class UnicodeDirReader(DirReader):
1899
"""A dir reader for non-utf8 file systems, which transcodes."""
1901
__slots__ = ['_utf8_encode']
1904
self._utf8_encode = codecs.getencoder('utf8')
1906
def top_prefix_to_starting_dir(self, top, prefix=""):
1907
"""See DirReader.top_prefix_to_starting_dir."""
1908
return (safe_utf8(prefix), None, None, None, safe_unicode(top))
1910
def read_dir(self, prefix, top):
1911
"""Read a single directory from a non-utf8 file system.
1913
top, and the abspath element in the output are unicode, all other paths
1914
are utf8. Local disk IO is done via unicode calls to listdir etc.
1916
This is currently the fallback code path when the filesystem encoding is
1917
not UTF-8. It may be better to implement an alternative so that we can
1918
safely handle paths that are not properly decodable in the current
1921
See DirReader.read_dir for details.
1923
_utf8_encode = self._utf8_encode
1925
def _fs_decode(s): return s.decode(_fs_enc)
1927
def _fs_encode(s): return s.encode(_fs_enc)
1929
_listdir = os.listdir
1930
_kind_from_mode = file_kind_from_stat_mode
1933
relprefix = prefix + b'/'
1936
top_slash = top + '/'
1939
append = dirblock.append
1940
for name_native in _listdir(top.encode('utf-8')):
1942
name = _fs_decode(name_native)
1943
except UnicodeDecodeError:
1944
raise errors.BadFilenameEncoding(
1945
relprefix + name_native, _fs_enc)
1946
name_utf8 = _utf8_encode(name)[0]
1947
abspath = top_slash + name
1948
statvalue = _lstat(abspath)
1949
kind = _kind_from_mode(statvalue.st_mode)
1950
append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
1951
return sorted(dirblock)
1954
def copy_tree(from_path, to_path, handlers={}):
1955
"""Copy all of the entries in from_path into to_path.
1957
:param from_path: The base directory to copy.
1958
:param to_path: The target directory. If it does not exist, it will
1960
:param handlers: A dictionary of functions, which takes a source and
1961
destinations for files, directories, etc.
1962
It is keyed on the file kind, such as 'directory', 'symlink', or 'file'
1963
'file', 'directory', and 'symlink' should always exist.
1964
If they are missing, they will be replaced with 'os.mkdir()',
1965
'os.readlink() + os.symlink()', and 'shutil.copy2()', respectively.
1967
# Now, just copy the existing cached tree to the new location
1968
# We use a cheap trick here.
1969
# Absolute paths are prefixed with the first parameter
1970
# relative paths are prefixed with the second.
1971
# So we can get both the source and target returned
1972
# without any extra work.
1974
def copy_dir(source, dest):
1977
def copy_link(source, dest):
1978
"""Copy the contents of a symlink"""
1979
link_to = os.readlink(source)
1980
os.symlink(link_to, dest)
1982
real_handlers = {'file': shutil.copy2,
1983
'symlink': copy_link,
1984
'directory': copy_dir,
1986
real_handlers.update(handlers)
1988
if not os.path.exists(to_path):
1989
real_handlers['directory'](from_path, to_path)
1991
for dir_info, entries in walkdirs(from_path, prefix=to_path):
1992
for relpath, name, kind, st, abspath in entries:
1993
real_handlers[kind](abspath, relpath)
1996
def copy_ownership_from_path(dst, src=None):
1997
"""Copy usr/grp ownership from src file/dir to dst file/dir.
1999
If src is None, the containing directory is used as source. If chown
2000
fails, the error is ignored and a warning is printed.
2002
chown = getattr(os, 'chown', None)
2007
src = os.path.dirname(dst)
2013
chown(dst, s.st_uid, s.st_gid)
2016
'Unable to copy ownership from "%s" to "%s". '
2017
'You may want to set it manually.', src, dst)
2018
trace.log_exception_quietly()
2021
def path_prefix_key(path):
2022
"""Generate a prefix-order path key for path.
2024
This can be used to sort paths in the same way that walkdirs does.
2026
return (dirname(path), path)
2029
def compare_paths_prefix_order(path_a, path_b):
2030
"""Compare path_a and path_b to generate the same order walkdirs uses."""
2031
key_a = path_prefix_key(path_a)
2032
key_b = path_prefix_key(path_b)
2033
return (key_a > key_b) - (key_a < key_b)
2036
_cached_user_encoding = None
2039
def get_user_encoding():
2040
"""Find out what the preferred user encoding is.
2042
This is generally the encoding that is used for command line parameters
2043
and file contents. This may be different from the terminal encoding
2044
or the filesystem encoding.
2046
:return: A string defining the preferred user encoding
2048
global _cached_user_encoding
2049
if _cached_user_encoding is not None:
2050
return _cached_user_encoding
2052
if os.name == 'posix' and getattr(locale, 'CODESET', None) is not None:
2053
# Use the existing locale settings and call nl_langinfo directly
2054
# rather than going through getpreferredencoding. This avoids
2055
# <http://bugs.python.org/issue6202> on OSX Python 2.6 and the
2056
# possibility of the setlocale call throwing an error.
2057
user_encoding = locale.nl_langinfo(locale.CODESET)
2059
# GZ 2011-12-19: On windows could call GetACP directly instead.
2060
user_encoding = locale.getpreferredencoding(False)
2063
user_encoding = codecs.lookup(user_encoding).name
2065
if user_encoding not in ("", "cp0"):
2066
sys.stderr.write('brz: warning:'
2067
' unknown encoding %s.'
2068
' Continuing with ascii encoding.\n'
2071
user_encoding = 'ascii'
2073
# Get 'ascii' when setlocale has not been called or LANG=C or unset.
2074
if user_encoding == 'ascii':
2075
if sys.platform == 'darwin':
2076
# OSX is special-cased in Python to have a UTF-8 filesystem
2077
# encoding and previously had LANG set here if not present.
2078
user_encoding = 'utf-8'
2079
# GZ 2011-12-19: Maybe UTF-8 should be the default in this case
2080
# for some other posix platforms as well.
2082
_cached_user_encoding = user_encoding
2083
return user_encoding
2086
def get_diff_header_encoding():
2087
return get_terminal_encoding()
2090
def get_host_name():
2091
"""Return the current unicode host name.
2093
This is meant to be used in place of socket.gethostname() because that
2094
behaves inconsistently on different platforms.
2096
if sys.platform == "win32":
2097
return win32utils.get_host_name()
2101
return socket.gethostname()
2102
return socket.gethostname().decode(get_user_encoding())
2105


# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
# data at once.
MAX_SOCKET_CHUNK = 64 * 1024

_end_of_stream_errors = [errno.ECONNRESET, errno.EPIPE, errno.EINVAL]
for _eno in ['WSAECONNRESET', 'WSAECONNABORTED']:
    _eno = getattr(errno, _eno, None)
    if _eno is not None:
        _end_of_stream_errors.append(_eno)
del _eno


def read_bytes_from_socket(sock, report_activity=None,
                           max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size of bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while True:
        try:
            data = sock.recv(max_read_size)
        except socket.error as e:
            eno = e.args[0]
            if eno in _end_of_stream_errors:
                # The connection was closed by the other side. Callers expect
                # an empty string to signal end-of-stream.
                return b""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(data), 'read')
            return data


def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer. MSG_WAITALL is not available
    on all platforms, but this should work everywhere. This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = b''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == b'':
            break  # eof
        b += new
    return b


def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are sent, see
        Transport._report_activity
    """
    sent_total = 0
    byte_count = len(bytes)
    view = memoryview(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(view[sent_total:sent_total + MAX_SOCKET_CHUNK])
        except (socket.error, IOError) as e:
            if e.args[0] in _end_of_stream_errors:
                raise errors.ConnectionReset(
                    "Error trying to write to socket", e)
            if e.args[0] != errno.EINTR:
                raise
        else:
            if sent == 0:
                raise errors.ConnectionReset('Sending to %s returned 0 bytes'
                                             % (sock,))
            sent_total += sent
            if report_activity is not None:
                report_activity(sent, 'write')
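

# Illustrative sketch only, not part of the breezy API: a small round trip
# through send_all and recv_all over socket.socketpair(). socketpair is not
# available on every platform or Python version, so treat this as test-style
# code; the payload is an assumption and is kept small to avoid blocking.
def _example_socket_roundtrip(payload=b'hello'):
    a, b = socket.socketpair()
    try:
        send_all(a, payload)
        return recv_all(b, len(payload)) == payload
    finally:
        a.close()
        b.close()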


def connect_socket(address):
    # Slight variation of the socket.create_connection() function (provided by
    # python-2.6) that can fail if getaddrinfo returns an empty list. We also
    # provide it for previous python versions. Also, we don't use the timeout
    # parameter (provided by the python implementation) so we don't implement
    # it either.
    err = socket.error('getaddrinfo returns an empty list')
    host, port = address
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.connect(sa)
            return sock

        except socket.error as e:
            err = e
            # 'err' is now the most recent error
            if sock is not None:
                sock.close()
    raise err


def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced. But the file itself is not
    dereferenced.
    :param path: The original path. May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)


def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"


def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with breezy are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can delegate to it.
    """
    # Check package name is within breezy
    if package == "breezy":
        resource_relpath = resource_name
    elif package.startswith("breezy."):
        package = package[len("breezy."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in breezy' % package)

    # Map the resource to a file and read its contents
    base = dirname(breezy.__file__)
    if getattr(sys, 'frozen', None):    # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    with open(pathjoin(base, resource_relpath), "rt") as f:
        return f.read()


def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from ._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from ._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)


file_kind_from_stat_mode = file_kind_from_stat_mode_thunk


def file_stat(f, _lstat=os.lstat):
    try:
        return _lstat(f)
    except OSError as e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise


def file_kind(f, _lstat=os.lstat):
    stat_value = file_stat(f, _lstat)
    return file_kind_from_stat_mode(stat_value.st_mode)
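

# Illustrative sketch only, not part of the breezy API: file_kind maps an
# on-disk path to one of the kind strings used throughout breezy, such as
# 'file', 'directory' or 'symlink', and raises NoSuchFile for missing paths.
# The default path here is only an assumption.
def _example_describe_path(path='.'):
    return '%s is a %s' % (path, file_kind(path))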


def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur. This is typically only true for low-level operations
    like os.read. If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR. There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal). So this function can reduce the impact for IO that breezy
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError) as e:
            if e.errno == errno.EINTR:
                continue
            raise
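

# Illustrative sketch only, not part of the breezy API: os.read is one of the
# low-level calls that is safe to retry on EINTR, so it can be wrapped
# directly; the chunk size is an assumption.
def _example_read_retrying(fd, count=4096):
    return until_no_eintr(os.read, fd, count)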


if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty
        import termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch

if sys.platform.startswith('linux'):
    def _local_concurrency():
        try:
            return os.sysconf('SC_NPROCESSORS_ONLN')
        except (ValueError, OSError, AttributeError):
            return None
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif "bsd" in sys.platform:
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p', ],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        # No platform-specific way to find out; fall back to the default.
        return None


_cached_local_concurrency = None


def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BRZ_CONCURRENCY', None)
    if concurrency is None:
        import multiprocessing
        try:
            concurrency = multiprocessing.cpu_count()
        except NotImplementedError:
            # multiprocessing.cpu_count() isn't implemented on all platforms
            try:
                concurrency = _local_concurrency()
            except (OSError, IOError):
                pass
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
        concurrency = 1
    if use_cache:
        _cached_local_concurrency = concurrency
    return concurrency
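

# Illustrative sketch only, not part of the breezy API: using
# local_concurrency to choose a worker count for a pool; the cap of 8 is an
# assumption, not a breezy default.
def _example_worker_count(cap=8):
    return max(1, min(local_concurrency(), cap))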


class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if isinstance(object, str):
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)


if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes. Deleting or renaming a closed file opened with this
        function is not blocking child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for flags for each modes.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else:  # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)
else:
    open_file = open


def available_backup_name(base, exists):
    """Find a non-existing backup file name.

    This will *not* create anything, this only returns a 'free' entry. This
    should be used for checking names in a directory below a locked
    tree/branch/repo to avoid race conditions. This is LBYL (Look Before You
    Leap) and generally discouraged.

    :param base: The base name.

    :param exists: A callable returning True if the path parameter exists.
    """
    counter = 1
    name = "%s.~%d~" % (base, counter)
    while exists(name):
        counter += 1
        name = "%s.~%d~" % (base, counter)
    return name
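

# Illustrative sketch only, not part of the breezy API: picking a backup name
# next to an existing file, using os.path.exists as the existence check. The
# default path is an assumption.
def _example_backup_name(path='branch.conf'):
    # Yields 'branch.conf.~1~' unless that exists, then '.~2~', and so on.
    return available_backup_name(path, os.path.exists)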


def set_fd_cloexec(fd):
    """Set a Unix file descriptor's FD_CLOEXEC flag. Do nothing if platform
    support for this is not available.
    """
    try:
        import fcntl
        old = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old | fcntl.FD_CLOEXEC)
    except (ImportError, AttributeError):
        # Either the fcntl module or specific constants are not present
        pass


def find_executable_on_path(name):
    """Finds an executable on the PATH.

    On Windows, this will try to append each extension in the PATHEXT
    environment variable to the name, if it cannot be found with the name
    as given.

    :param name: The base name of the executable.
    :return: The path to the executable found or None.
    """
    if sys.platform == 'win32':
        exts = os.environ.get('PATHEXT', '').split(os.pathsep)
        exts = [ext.lower() for ext in exts]
        base, ext = os.path.splitext(name)
        if ext != '':
            if ext.lower() not in exts:
                return None
            name = base
            exts = [ext]
    else:
        exts = ['']
    path = os.environ.get('PATH')
    if path is not None:
        path = path.split(os.pathsep)
        for ext in exts:
            for d in path:
                f = os.path.join(d, name) + ext
                if os.access(f, os.X_OK):
                    return f
    if sys.platform == 'win32':
        app_path = win32utils.get_app_path(name)
        if app_path != name:
            return app_path
    return None
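

# Illustrative sketch only, not part of the breezy API: probing PATH for an
# external tool; the candidate names are assumptions and may not be installed.
def _example_find_gpg():
    return find_executable_on_path('gpg2') or find_executable_on_path('gpg')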


def _posix_is_local_pid_dead(pid):
    """True if pid doesn't correspond to live process on this machine"""
    try:
        # Special meaning of unix kill: just check if it's there.
        os.kill(pid, 0)
    except OSError as e:
        if e.errno == errno.ESRCH:
            # On this machine, and really not found: as sure as we can be
            # that it's dead.
            return True
        elif e.errno == errno.EPERM:
            # exists, though not ours
            return False
        else:
            trace.mutter("os.kill(%d, 0) failed: %s" % (pid, e))
            # Don't really know.
            return False
    else:
        # Exists and our process: not dead.
        return False


if sys.platform == "win32":
    is_local_pid_dead = win32utils.is_local_pid_dead
else:
    is_local_pid_dead = _posix_is_local_pid_dead
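

# Illustrative sketch only, not part of the breezy API: the current process
# is alive by definition, so this check should always report False here.
def _example_current_pid_dead():
    return is_local_pid_dead(os.getpid())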


_maybe_ignored = ['EAGAIN', 'EINTR', 'ENOTSUP', 'EOPNOTSUPP', 'EACCES']
_fdatasync_ignored = [getattr(errno, name) for name in _maybe_ignored
                      if getattr(errno, name, None) is not None]


def fdatasync(fileno):
    """Flush file contents to disk if possible.

    :param fileno: Integer OS file handle.
    :raises TransportNotPossible: If flushing to disk is not possible.
    """
    fn = getattr(os, 'fdatasync', getattr(os, 'fsync', None))
    if fn is not None:
        try:
            fn(fileno)
        except IOError as e:
            # See bug #1075108, on some platforms fdatasync exists, but can
            # raise ENOTSUP. However, we are calling fdatasync to be helpful
            # and reduce the chance of corruption-on-powerloss situations. It
            # is not a mandatory call, so it is ok to suppress failures.
            trace.mutter("ignoring error calling fdatasync: %s" % (e,))
            if getattr(e, 'errno', None) not in _fdatasync_ignored:
                raise
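

# Illustrative sketch only, not part of the breezy API: writing a file and
# flushing it to disk before closing; the path and payload are assumptions.
def _example_write_durably(path, data=b'payload'):
    with open(path, 'wb') as f:
        f.write(data)
        f.flush()
        fdatasync(f.fileno())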


def ensure_empty_directory_exists(path, exception_class):
    """Make sure a local directory exists and is empty.

    If it does not exist, it is created. If it exists and is not empty, an
    instance of exception_class is raised.
    """
    try:
        os.mkdir(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
        if os.listdir(path) != []:
            raise exception_class(path)


def is_environment_error(evalue):
    """True if exception instance is due to a process environment issue

    This includes OSError and IOError, but also other errors that come from
    the operating system or core libraries but are not subclasses of those.
    """
    if isinstance(evalue, (EnvironmentError, select.error)):
        return True
    if sys.platform == "win32" and win32utils._is_pywintypes_error(evalue):