    realpath as _nt_realpath,
    splitdrive as _nt_splitdrive,
from os import listdir
from shutil import copyfile
from stat import (S_ISREG, S_ISDIR, S_ISLNK, ST_MODE, ST_SIZE,
                  S_ISCHR, S_ISBLK, S_ISFIFO, S_ISSOCK)
from tempfile import (
from bzrlib.symbol_versioning import (
# sha and md5 modules are deprecated in python2.6 but hashlib is available as
# of python2.5
if sys.version_info < (2, 5):
    import md5 as _mod_md5
    import sha as _mod_sha
from bzrlib.errors import (BzrError,
                           BzrBadParameterNotUnicode,
from bzrlib.symbol_versioning import (deprecated_function,
from bzrlib.trace import mutter
from bzrlib import symbol_versioning
# Cross platform wall-clock time functionality with decent resolution.
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
# synchronized with ``time.time()``, this is only meant to be used to find
# delta times by subtracting from another call to this function.
timer_func = time.time
if sys.platform == 'win32':
    timer_func = time.clock
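
# Illustrative usage sketch (added for clarity; not part of the original
# bzrlib source). timer_func is only meant for computing deltas:
#   start = timer_func()
#   ...do some work...
#   elapsed = timer_func() - start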
# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
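
# Illustrative sketch (not from the original source): because these default to
# 0 on platforms that lack them, they can always be OR-ed into os.open flags:
#   fd = os.open(path, os.O_RDONLY | O_BINARY | O_NOINHERIT)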
def get_unicode_argv():
    try:
        user_encoding = get_user_encoding()
        return [a.decode(user_encoding) for a in sys.argv[1:]]
    except UnicodeDecodeError:
        raise errors.BzrError(("Parameter '%r' is unsupported by the current "
                               "encoding." % a))
def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        mod = mod & 0777555
        os.chmod(filename, mod)
def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        mod = mod | 0200
        os.chmod(filename, mod)
def minimum_path_selection(paths):
    """Return the smallest subset of paths which are outside paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.
    """
    def sort_key(path):
        return path.split('/')
    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
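
# Illustrative sketch (not from the original source):
#   sorted(minimum_path_selection(['a/b', 'a/b/c', 'd']))
#   -> ['a/b', 'd']   # 'a/b/c' is already covered by 'a/b'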
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):
def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.
    """
    length = 0
    if read_length >= 0:
        # read specified number of bytes
        while read_length > 0:
            num_bytes_to_read = min(read_length, buff_size)
            block = from_file.read(num_bytes_to_read)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)

            actual_bytes_read = len(block)
            read_length -= actual_bytes_read
            length += actual_bytes_read
    else:
        # read to EOF
        while True:
            block = from_file.read(buff_size)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)
            length += len(block)
    return length
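
# Illustrative sketch (not from the original source; src and dst are
# hypothetical paths):
#   copied = pumpfile(open(src, 'rb'), open(dst, 'wb'))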
def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.
    """
    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
    # drives).
    if not segment_size:
        segment_size = 5242880 # 5MB
    segments = range(len(bytes) / segment_size + 1)
    write = file_handle.write
    for segment_index in segments:
        segment = buffer(bytes, segment_index * segment_size, segment_size)
        write(segment)
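
# Illustrative sketch (not from the original source; dst is a hypothetical
# path): a 10MB string is written as three <=5MB segments.
#   pump_string_file('x' * (10 * 1024 * 1024), open(dst, 'wb'))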
def file_iterator(input_file, readsize=32768):
def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    # python2.3 localtime() can't take None
    if t is None:
        t = time.time()

    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds


weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]
def format_date(t, offset=0, timezone='original', date_fmt=None,
                show_offset=True):
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str
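
# Illustrative sketch (not from the original source):
#   format_date(0, offset=0, timezone='utc')
#   -> 'Thu 1970-01-01 00:00:00 +0000'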
# Cache of formatted offset strings
_offset_cache = {}


def format_date_with_offset_in_original_timezone(t, offset=0,
    _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    """
    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str
def format_local_date(t, offset=0, timezone='original', date_fmt=None,
                      show_offset=True):
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, unicode):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str
def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
        tt = time.gmtime(t)
        offset = 0
    elif timezone == 'original':
        if offset is None:
            offset = 0
        tt = time.gmtime(t + offset)
    elif timezone == 'local':
        tt = time.localtime(t)
        offset = local_time_offset(t)
    else:
        raise errors.UnsupportedTimezoneFormat(timezone)
    if date_fmt is None:
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
    if show_offset:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    else:
        offset_str = ''
    return (date_fmt, tt, offset_str)
def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))
def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution
    """
    delta = int(delta)
    if delta >= 0:
        direction = 'ago'
    else:
        direction = 'in the future'
        delta = -delta

    seconds = delta
    if seconds < 90: # print seconds up to 90 seconds
        if seconds == 1:
            return '%d second %s' % (seconds, direction,)
        else:
            return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if seconds == 1:
        plural_seconds = ''
    else:
        plural_seconds = 's'
    if minutes < 90: # print minutes, seconds up to 90 minutes
        if minutes == 1:
            return '%d minute, %d second%s %s' % (
                   minutes, seconds, plural_seconds, direction)
        else:
            return '%d minutes, %d second%s %s' % (
                   minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
    if minutes == 1:
        plural_minutes = ''
    else:
        plural_minutes = 's'
    if hours == 1:
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
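
# Illustrative sketch (not from the original source):
#   format_delta(100)  -> '1 minute, 40 seconds ago'
#   format_delta(-30)  -> '30 seconds in the future'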
    """Return size of given open file."""
            raise errors.BzrError("sorry, %r not allowed in path" % f)
        elif (f == '.') or (f == ''):
    for f in p:
        if (f == '..') or (f is None) or (f == ''):
            raise errors.BzrError("sorry, %r not allowed in path" % f)
    return pathjoin(*p)


@deprecated_function(zero_nine)
def appendpath(p1, p2):
    return pathjoin(p1, p2)
def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].
    """
    parents = []
    parts = splitpath(dirname(filename))
    while parts:
        parents.append(joinpath(parts))
        parts.pop()
    return parents
_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension. If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> try:
    >>>     import bzrlib._fictional_extension_pyx
    >>> except ImportError, e:
    >>>     bzrlib.osutils.failed_to_load_extension(e)
    >>>     import bzrlib._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --
    # mutter has other problems in the same (lazy-loaded) namespace.
    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    # with 10 warnings.
    from bzrlib import trace
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)


def report_extension_load_failures():
    if not _extension_load_failures:
        return
    from bzrlib.config import GlobalConfig
    if GlobalConfig().get_user_option_as_bool('ignore_missing_extensions'):
        return
    # the warnings framework should by default show this only once
    from bzrlib.trace import warning
    warning(
        "bzr: warning: some compiled extensions could not be loaded; "
        "see <https://answers.launchpad.net/bzr/+faq/703>")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529


try:
    from bzrlib._chunks_to_lines_pyx import chunks_to_lines
except ImportError, e:
    failed_to_load_extension(e)
    from bzrlib._chunks_to_lines_py import chunks_to_lines
def split_lines(s):
    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, str):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
    else:
        return _split_lines(s)


def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    lines = s.split('\n')
    result = [line + '\n' for line in lines[:-1]]
    if lines[-1]:
        result.append(lines[-1])
    return result
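
# Illustrative sketch (not from the original source):
#   split_lines('a\nb\nc')  -> ['a\n', 'b\n', 'c']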
    avoids that problem.
    """

    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError('%r is too short to calculate a relative path'
            % (base,))

    rp = abspath(path)

    s = []
    head = rp
    while True:
        if len(head) <= len(base) and head != base:
            raise errors.PathNotChild(rp, base)
        if head == base:
            break
        head, tail = split(head)
        if tail:
            s.append(tail)

    return pathjoin(*reversed(s))
def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in
    # the short term.
    rel = relpath(base, path)
    # '.' will have been turned into ''
    if not rel:
        return rel

    abs_base = abspath(base)
    current = abs_base
    _listdir = os.listdir

    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
        lbit = bit.lower()
        try:
            next_entries = _listdir(current)
        except OSError: # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            # remaining bits.
            current = pathjoin(current, bit, *list(bit_iter))
            break
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
                break
        else:
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
            break
    return current[len(abs_base):].lstrip('/')
# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there. For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
else:
    canonical_relpath = relpath


def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]
def safe_unicode(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string into unicode.

    If it is unicode, it is returned.
    Otherwise it is decoded from utf-8. If decoding fails, the exception is
    wrapped in a BzrBadParameterNotUnicode exception.
    """
    if isinstance(unicode_or_utf8_string, unicode):
        return unicode_or_utf8_string
    try:
        return unicode_or_utf8_string.decode('utf8')
    except UnicodeDecodeError:
        raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
def safe_utf8(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string to a utf8 string.

    If it is a str, it is returned.
    If it is Unicode, it is encoded into a utf-8 string.
    """
    if isinstance(unicode_or_utf8_string, str):
        # TODO: jam 20070209 This is overkill, and probably has an impact on
        #       performance if we are dealing with lots of apis that want a
        #       utf-8 revision id
        try:
            # Make sure it is a valid utf-8 string
            unicode_or_utf8_string.decode('utf-8')
        except UnicodeDecodeError:
            raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
        return unicode_or_utf8_string
    return unicode_or_utf8_string.encode('utf-8')
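
# Illustrative sketch (not from the original source):
#   safe_utf8(u'caf\xe9')        -> 'caf\xc3\xa9'
#   safe_unicode('caf\xc3\xa9')  -> u'caf\xe9'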
_revision_id_warning = ('Unicode revision ids were deprecated in bzr 0.15.'
                        ' Revision id generators should be creating utf8'
                        ' revision ids.')


def safe_revision_id(unicode_or_utf8_string, warn=True):
    """Revision ids should now be utf8, but at one point they were unicode.

    :param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
        utf8 or None).
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 revision id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
    if warn:
        symbol_versioning.warn(_revision_id_warning, DeprecationWarning,
                               stacklevel=2)
    return cache_utf8.encode(unicode_or_utf8_string)


_file_id_warning = ('Unicode file ids were deprecated in bzr 0.15. File id'
                    ' generators should be creating utf8 file ids.')


def safe_file_id(unicode_or_utf8_string, warn=True):
    """File ids should now be utf8, but at one point they were unicode.

    This is the same as safe_utf8, except it uses the cached encode functions
    to save a little bit of performance.

    :param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
        utf8 or None).
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 file id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
    if warn:
        symbol_versioning.warn(_file_id_warning, DeprecationWarning,
                               stacklevel=2)
    return cache_utf8.encode(unicode_or_utf8_string)
_platform_normalizes_filenames = False
    normalized_filename = _inaccessible_normalized_filename
def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`). May be ignored if the feature is not available on this
        platform or Python version.
    """
    try:
        import signal
        siginterrupt = signal.siginterrupt
    except ImportError:
        # This python implementation doesn't provide signal support, hence no
        # handler exists
        return None
    except AttributeError:
        # siginterrupt doesn't exist on this platform, or for this version
        # of Python.
        siginterrupt = lambda signum, flag: None
    if restart_syscall:
        def sig_handler(*args):
            # Python resets the siginterrupt flag when a signal is
            # received. <http://bugs.python.org/issue8354>
            # As a workaround for some cases, set it back the way we want it.
            siginterrupt(signum, False)
            # Now run the handler function passed to set_signal_handler.
            handler(*args)
    else:
        sig_handler = handler
    old_handler = signal.signal(signum, sig_handler)
    if restart_syscall:
        siginterrupt(signum, False)
    return old_handler
default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""
def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    The rules are:
    - if BZR_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - if COLUMNS is set, returns its value,

    From there, we need to query the OS to get the size of the controlling
    terminal.

    Unices:
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    Windows:
    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)
    """

    # If BZR_COLUMNS is set, take it, user is always right
    try:
        return int(os.environ['BZR_COLUMNS'])
    except (KeyError, ValueError):
        pass

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BZR_COLUMNS is the recommended way to override.
        return None

    # If COLUMNS is set, take it, the terminal knows better (even inside a
    # given terminal, the application can decide to set COLUMNS to a lower
    # value (split screen) or a bigger value (scroll bars))
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):
        pass

    width, height = _terminal_size(None, None)
    if width <= 0:
        # Consider invalid values as meaning no width
        return None

    return width
def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(defaultx=width, defaulty=height)
    return width, height


def _ioctl_terminal_size(width, height):
    try:
        import struct, fcntl, termios
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
        pass
    return width, height
_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the
controlling terminal. If any error occurs, the provided default values should
be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
else:
    _terminal_size = _ioctl_terminal_size
def _terminal_size_changed(signum, frame):
    """Set COLUMNS upon receiving a SIGnal for WINdow size CHange."""
    width, height = _terminal_size(None, None)
    if width is not None:
        os.environ['COLUMNS'] = str(width)


_registered_sigwinch = False

def watch_sigwinch():
    """Register for SIGWINCH, once and only once.

    Do nothing if the signal module is not available.
    """
    global _registered_sigwinch
    if not _registered_sigwinch:
        try:
            import signal
            if getattr(signal, "SIGWINCH", None) is not None:
                set_signal_handler(signal.SIGWINCH, _terminal_size_changed)
        except ImportError:
            # python doesn't provide signal support, nothing we can do about it
            pass
        _registered_sigwinch = True
def supports_executable():
    return sys.platform != "win32"


def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted. The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"
def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable.
def check_legal_path(path):
    """Check whether the supplied path is legal.
    This is only required on Windows, so we don't test on other platforms
    right now.
    """
    if sys.platform != "win32":
        return
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)
_WIN32_ERROR_DIRECTORY = 267 # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR
        or (sys.platform == 'win32'
            and (en == _WIN32_ERROR_DIRECTORY
                 or (en == errno.EINVAL
                     and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
        ))):
        return True
    return False
def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:
    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
     - directory-relpath is the relative path of the directory being returned
       with respect to top. prefix is prepended to this.
     - directory-path-from-root is the path including top for this directory.
       It is suitable for use with os functions.
     - relpath is the relative path within the subtree being walked.
     - basename is the basename of the path
     - kind is the kind of the file now. If unknown then the file is not
       present within the tree - but it may be recorded as versioned. See
       versioned_kind.
     - lstat is the stat data *if* the file was statted.
     - planned, not implemented:
       path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
        rooted higher up.
    :return: an iterator over the dirs.
    """
    #TODO there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _lstat = os.lstat
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
    while pending:
        # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
        relroot, _, _, _, top = pending.pop()
        if relroot:
            relprefix = relroot + u'/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        try:
            names = sorted(_listdir(top))
        except OSError, e:
            if not _is_error_enotdir(e):
                raise
        else:
            for name in names:
                abspath = top_slash + name
                statvalue = _lstat(abspath)
                kind = _kind_from_mode(statvalue.st_mode)
                append((relprefix + name, name, kind, statvalue, abspath))
            yield (relroot, top), dirblock

            # push the user specified dirs from dirblock
            pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
class DirReader(object):
    """An interface for reading directories."""

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """Converts top and prefix to a starting dir entry

        :param top: A utf8 path
        :param prefix: An optional utf8 path to prefix output relative paths
            with.
        :return: A tuple starting with prefix, and ending with the native
            encoding of top.
        """
        raise NotImplementedError(self.top_prefix_to_starting_dir)

    def read_dir(self, prefix, top):
        """Read a specific dir.

        :param prefix: A utf8 prefix to be prepended to the path basenames.
        :param top: A natively encoded path to read.
        :return: A list of the directory's contents. Each item contains:
            (utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
        """
        raise NotImplementedError(self.read_dir)
_selected_dir_reader = None


def _walkdirs_utf8(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields the same information as walkdirs() only each entry is yielded
    in utf-8. On platforms which have a filesystem encoding of utf8 the paths
    are returned as exact byte-strings.

    :return: yields a tuple of (dir_info, [file_info])
        dir_info is (utf8_relpath, path-from-top)
        file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
        if top is an absolute path, path-from-top is also an absolute path.
        path-from-top might be unicode or utf8, but it is the correct path to
        pass to os functions to affect the file in question. (such as os.lstat)
    """
    global _selected_dir_reader
    if _selected_dir_reader is None:
        fs_encoding = _fs_enc.upper()
        if sys.platform == "win32" and win32utils.winver == 'Windows NT':
            # Win98 doesn't have unicode apis like FindFirstFileW
            # TODO: We possibly could support Win98 by falling back to the
            #       original FindFirstFile, and using TCHAR instead of WCHAR,
            #       but that gets a bit tricky, and requires custom compiling
            #       for win98 anyway.
            try:
                from bzrlib._walkdirs_win32 import Win32ReadDir
                _selected_dir_reader = Win32ReadDir()
            except ImportError:
                pass
        elif fs_encoding in ('UTF-8', 'US-ASCII', 'ANSI_X3.4-1968'):
            # ANSI_X3.4-1968 is a form of ASCII
            try:
                from bzrlib._readdir_pyx import UTF8DirReader
                _selected_dir_reader = UTF8DirReader()
            except ImportError, e:
                failed_to_load_extension(e)

    if _selected_dir_reader is None:
        # Fallback to the python version
        _selected_dir_reader = UnicodeDirReader()

    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    # But we don't actually use 1-3 in pending, so set them to None
    pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
    read_dir = _selected_dir_reader.read_dir
    _directory = _directory_kind
    while pending:
        relroot, _, _, _, top = pending[-1].pop()
        if not pending[-1]:
            pending.pop()
        dirblock = sorted(read_dir(relroot, top))
        yield (relroot, top), dirblock
        # push the user specified dirs from dirblock
        next = [d for d in reversed(dirblock) if d[2] == _directory]
        if next:
            pending.append(next)
class UnicodeDirReader(DirReader):
    """A dir reader for non-utf8 file systems, which transcodes."""

    __slots__ = ['_utf8_encode']

    def __init__(self):
        self._utf8_encode = codecs.getencoder('utf8')

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """See DirReader.top_prefix_to_starting_dir."""
        return (safe_utf8(prefix), None, None, None, safe_unicode(top))

    def read_dir(self, prefix, top):
        """Read a single directory from a non-utf8 file system.

        top, and the abspath element in the output are unicode, all other paths
        are utf8. Local disk IO is done via unicode calls to listdir etc.

        This is currently the fallback code path when the filesystem encoding is
        not UTF-8. It may be better to implement an alternative so that we can
        safely handle paths that are not properly decodable in the current
        encoding.

        See DirReader.read_dir for details.
        """
        _utf8_encode = self._utf8_encode
        _lstat = os.lstat
        _listdir = os.listdir
        _kind_from_mode = file_kind_from_stat_mode

        if prefix:
            relprefix = prefix + '/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        for name in sorted(_listdir(top)):
            try:
                name_utf8 = _utf8_encode(name)[0]
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    _utf8_encode(relprefix)[0] + name, _fs_enc)
            abspath = top_slash + name
            statvalue = _lstat(abspath)
            kind = _kind_from_mode(statvalue.st_mode)
            append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
        return dirblock
def copy_tree(from_path, to_path, handlers={}):
    """Copy all of the entries in from_path into to_path.

    :param from_path: The base directory to copy.
    :param to_path: The target directory. If it does not exist, it will
        be created.
    :param handlers: A dictionary of functions, which takes a source and
        destinations for files, directories, etc.
                         ' doesn\'t support the locale set by $LANG (%s)\n'
                         " Continuing with ascii encoding.\n"
                         % (e, os.environ.get('LANG')))
        user_encoding = 'ascii'

    # Windows returns 'cp0' to indicate there is no code page. So we'll just
    # treat that as ASCII, and not support printing unicode characters to the
    # console.
    #
    # For python scripts run under vim, we get '', so also treat that as ASCII
    if user_encoding in (None, 'cp0', ''):
        user_encoding = 'ascii'
    else:
        try:
            codecs.lookup(user_encoding)
        except LookupError:
            sys.stderr.write('bzr: warning:'
                             ' unknown encoding %s.'
                             ' Continuing with ascii encoding.\n'
                             % user_encoding)
            user_encoding = 'ascii'

    _cached_user_encoding = user_encoding

    return user_encoding
def get_host_name():
    """Return the current unicode host name.

    This is meant to be used in place of socket.gethostname() because that
    behaves inconsistently on different platforms.
    """
    if sys.platform == "win32":
        import win32utils
        return win32utils.get_host_name()
    else:
        import socket
        return socket.gethostname().decode(get_user_encoding())
# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
# data at once.
MAX_SOCKET_CHUNK = 64 * 1024


def read_bytes_from_socket(sock, report_activity=None,
        max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size of bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while 1:
        try:
            bytes = sock.recv(max_read_size)
        except socket.error, e:
            eno = e.args[0]
            if eno == getattr(errno, "WSAECONNRESET", errno.ECONNRESET):
                # The connection was closed by the other side. Callers expect
                # an empty string to signal end-of-stream.
                return ""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(bytes), 'read')
            return bytes
def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer. MSG_WAITALL is not available
    on all platforms, but this should work everywhere. This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = ''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == '':
            break # eof
        b += new
    return b
def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    """
    sent_total = 0
    byte_count = len(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(buffer(bytes, sent_total, MAX_SOCKET_CHUNK))
        except socket.error, e:
            if e.args[0] != errno.EINTR:
                raise
        else:
            sent_total += sent
            if report_activity is not None:
                report_activity(sent, 'write')
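
# Illustrative sketch (not from the original source; sock and expected_len are
# hypothetical):
#   send_all(sock, data)                  # writes in MAX_SOCKET_CHUNK pieces
#   reply = recv_all(sock, expected_len)  # loops until eof or count reached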
def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced. But the file itself is not
    dereferenced.
    :param path: The original path. May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)
def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"
def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with bzrlib are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can be deprecated.
    """
    # Check package name is within bzrlib
    if package == "bzrlib":
        resource_relpath = resource_name
    elif package.startswith("bzrlib."):
        package = package[len("bzrlib."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in bzrlib' % package)

    # Map the resource to a file and read its contents
    base = dirname(bzrlib.__file__)
    if getattr(sys, 'frozen', None):    # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    filename = pathjoin(base, resource_relpath)
    return open(filename, 'rU').read()
def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from bzrlib._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError, e:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from bzrlib._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)
file_kind_from_stat_mode = file_kind_from_stat_mode_thunk


def file_kind(f, _lstat=os.lstat):
    try:
        return file_kind_from_stat_mode(_lstat(f).st_mode)
    except OSError, e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise
def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur. This is typically only true for low-level operations
    like os.read. If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR. There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal). So this function can reduce the impact for IO that bzrlib
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError), e:
            if e.errno == errno.EINTR:
                continue
            raise
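
# Illustrative sketch (not from the original source; fd is a hypothetical
# file descriptor):
#   data = until_no_eintr(os.read, fd, 4096)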
def re_compile_checked(re_string, flags=0, where=""):
    """Return a compiled re, or raise a sensible error.

    This should only be used when compiling user-supplied REs.

    :param re_string: Text form of regular expression.
    :param flags: eg re.IGNORECASE
    :param where: Message explaining to the user the context where
        it occurred, eg 'log search filter'.
    """
    # from https://bugs.launchpad.net/bzr/+bug/251352
    try:
        re_obj = re.compile(re_string, flags)
        re_obj.search("")
        return re_obj
    except re.error, e:
        if where:
            where = ' in ' + where
        # despite the name 'error' is a type
        raise errors.BzrCommandError('Invalid regular expression%s: %r: %s'
            % (where, re_string, e))
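
# Illustrative sketch (not from the original source):
#   matcher = re_compile_checked(r'\d+', re.IGNORECASE, 'log search filter')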
if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty
        import termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch
if sys.platform == 'linux2':
    def _local_concurrency():
        concurrency = None
        prefix = 'processor'
        for line in file('/proc/cpuinfo', 'rb'):
            if line.startswith(prefix):
                concurrency = int(line[line.find(':')+1:]) + 1
        return concurrency
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform[0:7] == 'freebsd':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p',],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        # Who knows ?
        return None


_cached_local_concurrency = None
def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BZR_CONCURRENCY', None)
    if concurrency is None:
        try:
            concurrency = _local_concurrency()
        except (OSError, IOError):
            pass
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
        concurrency = 1
    if use_cache:
        _cached_local_concurrency = concurrency

    return concurrency
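
# Illustrative sketch (not from the original source):
#   workers = local_concurrency()   # e.g. 4 on a quad-core machine, 1 on error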
class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if type(object) is str:
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)
if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes. Deleting or renaming a closed file opened with this
        function is not blocking child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for flags for each modes.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else: # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)