import codecs
import errno
import os
import re
import signal
import socket
import stat
import subprocess
import sys
import time
from datetime import datetime

from ntpath import (
    realpath as _nt_realpath,
    splitdrive as _nt_splitdrive,
    )
from os import listdir
from shutil import copyfile
from stat import (S_ISREG, S_ISDIR, S_ISLNK, ST_MODE, ST_SIZE,
                  S_ISCHR, S_ISBLK, S_ISFIFO, S_ISSOCK)
from tempfile import (
    mkdtemp,
    )

from bzrlib import symbol_versioning
from bzrlib.errors import (BzrError,
                           BzrBadParameterNotUnicode,
                           )
from bzrlib.symbol_versioning import (deprecated_function,
                                      zero_nine,
                                      )
from bzrlib.trace import mutter

# sha and md5 modules are deprecated in python2.6 but hashlib is available as
# of python2.5.
if sys.version_info < (2, 5):
    import md5 as _mod_md5
    import sha as _mod_sha
else:
    from hashlib import (
        md5 as _mod_md5,
        sha1 as _mod_sha,
        )


# Cross platform wall-clock time functionality with decent resolution.
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
# synchronized with ``time.time()``, this is only meant to be used to find
# delta times by subtracting from another call to this function.
timer_func = time.time
if sys.platform == 'win32':
    timer_func = time.clock

# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
def get_unicode_argv():
    try:
        user_encoding = get_user_encoding()
        return [a.decode(user_encoding) for a in sys.argv[1:]]
    except UnicodeDecodeError:
        raise errors.BzrError(("Parameter '%r' is unsupported by the current "
                               "encoding." % a))


def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        # clear the write bits
        mod = mod & 0777555
        os.chmod(filename, mod)


def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        # set the owner write bit
        mod = mod | 0200
        os.chmod(filename, mod)
def minimum_path_selection(paths):
    """Return the smallest subset of paths that covers everything in paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.
    """
    if len(paths) < 2:
        return set(paths)

    def sort_key(path):
        return path.split('/')
    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
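# Illustrative behaviour (comment sketch, not part of the original module):
# paths already covered by a selected parent are dropped from the result.
#   minimum_path_selection(['a/b', 'a', 'c'])   -> set(['a', 'c'])
#   minimum_path_selection(['a/b', 'a/c'])      -> set(['a/b', 'a/c'])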
def is_inside_or_parent_of_any(dir_list, fname):
    """True if fname is inside any of given dirs or any dir inside fname."""
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):
            return True
    return False


def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.
    """
    length = 0
    if read_length >= 0:
        # read specified number of bytes
        while read_length > 0:
            num_bytes_to_read = min(read_length, buff_size)
            block = from_file.read(num_bytes_to_read)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)

            actual_bytes_read = len(block)
            read_length -= actual_bytes_read
            length += actual_bytes_read
    else:
        # read to EOF
        while True:
            block = from_file.read(buff_size)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)
            length += len(block)
    return length
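# Example usage (illustrative sketch, not part of the original module):
#   >>> from StringIO import StringIO
#   >>> src = StringIO('some bytes to copy')
#   >>> dst = StringIO()
#   >>> pumpfile(src, dst, buff_size=4)
#   18
#   >>> dst.getvalue() == 'some bytes to copy'
#   True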
def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.
    """
    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
    # drives).
    if not segment_size:
        segment_size = 5242880 # 5MB
    segments = range(len(bytes) / segment_size + 1)
    write = file_handle.write
    for segment_index in segments:
        segment = buffer(bytes, segment_index * segment_size, segment_size)
        write(segment)
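# Example usage (illustrative sketch, not part of the original module): force a
# small segment_size so the write is split into several buffer() slices.
#   >>> import tempfile
#   >>> out = tempfile.TemporaryFile()
#   >>> pump_string_file('abcdefghij', out, segment_size=4)
#   >>> out.seek(0); out.read()
#   'abcdefghij'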
def file_iterator(input_file, readsize=32768):
    while True:
        b = input_file.read(readsize)
        if len(b) == 0:
            break
        yield b


def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    # python2.3 localtime() can't take None
    if t is None:
        t = time.time()
    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds


weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]
def format_date(t, offset=0, timezone='original', date_fmt=None,
                show_offset=True):
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str
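# Example output (illustrative sketch, not part of the original module):
#   >>> format_date(0, timezone='utc')
#   'Thu 1970-01-01 00:00:00 +0000'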
# Cache of formatted offset strings
_offset_cache = {}


def format_date_with_offset_in_original_timezone(t, offset=0,
        _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    """
    if offset is None:
        offset = 0
    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str


def format_local_date(t, offset=0, timezone='original', date_fmt=None,
                      show_offset=True):
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, unicode):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str


def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
        tt = time.gmtime(t)
        offset = 0
    elif timezone == 'original':
        if offset is None:
            offset = 0
        tt = time.gmtime(t + offset)
    elif timezone == 'local':
        tt = time.localtime(t)
        offset = local_time_offset(t)
    else:
        raise errors.UnsupportedTimezoneFormat(timezone)
    if date_fmt is None:
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
    if show_offset:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    else:
        offset_str = ''
    return (date_fmt, tt, offset_str)
def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))


def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution
    """
    delta = int(delta)
    if delta >= 0:
        direction = 'ago'
    else:
        direction = 'in the future'
        delta = -delta

    seconds = delta
    if seconds < 90: # print seconds up to 90 seconds
        if seconds == 1:
            return '%d second %s' % (seconds, direction,)
        else:
            return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if seconds == 1:
        plural_seconds = ''
    else:
        plural_seconds = 's'
    if minutes < 90: # print minutes, seconds up to 90 minutes
        if minutes == 1:
            return '%d minute, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)
        else:
            return '%d minutes, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
    if minutes == 1:
        plural_minutes = ''
    else:
        plural_minutes = 's'
    if hours == 1:
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
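# Example outputs (illustrative sketch, not part of the original module):
#   >>> format_delta(30)
#   '30 seconds ago'
#   >>> format_delta(-125)
#   '2 minutes, 5 seconds in the future'
#   >>> format_delta(5400)
#   '1 hour, 30 minutes ago'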
def filesize(f):
    """Return size of given open file."""
    return os.fstat(f.fileno())[ST_SIZE]


def splitpath(p):
    """Turn string into list of parts."""
    rps = []
    for f in re.split(r'[\\/]', p):
        if f == '..':
            raise errors.BzrError("sorry, %r not allowed in path" % f)
        elif (f == '.') or (f == ''):
            pass
        else:
            rps.append(f)
    return rps


def joinpath(p):
    for f in p:
        if (f == '..') or (f is None) or (f == ''):
            raise errors.BzrError("sorry, %r not allowed in path" % f)
    return pathjoin(*p)


@deprecated_function(zero_nine)
def appendpath(p1, p2):
    return pathjoin(p1, p2)


def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].
    """
    parents = []
    parts = splitpath(dirname(filename))
    while parts:
        parents.append(joinpath(parts))
        parts.pop()
    return parents
_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension. If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> try:
    >>>     import bzrlib._fictional_extension_pyx
    >>> except ImportError, e:
    >>>     bzrlib.osutils.failed_to_load_extension(e)
    >>>     import bzrlib._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --
    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    # with repeated messages.
    from bzrlib import trace
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)


def report_extension_load_failures():
    if not _extension_load_failures:
        return
    from bzrlib.config import GlobalConfig
    if GlobalConfig().get_user_option_as_bool('ignore_missing_extensions'):
        return
    # the warnings framework should by default show this only once
    from bzrlib.trace import warning
    warning(
        "bzr: warning: some compiled extensions could not be loaded; "
        "see <https://answers.launchpad.net/bzr/+faq/703>")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529


try:
    from bzrlib._chunks_to_lines_pyx import chunks_to_lines
except ImportError, e:
    failed_to_load_extension(e)
    from bzrlib._chunks_to_lines_py import chunks_to_lines
def split_lines(s):
    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, str):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
    else:
        return _split_lines(s)


def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    lines = s.split('\n')
    result = [line + '\n' for line in lines[:-1]]
    if lines[-1]:
        result.append(lines[-1])
    return result
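# Example behaviour (illustrative sketch, not part of the original module):
#   >>> split_lines('foo\nbar\nbaz')
#   ['foo\n', 'bar\n', 'baz']
#   >>> split_lines(u'foo\nbar\n')
#   [u'foo\n', u'bar\n']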
def relpath(base, path):
    """Return path relative to base, or raise PathNotChild exception.

    os.path.commonprefix (python2.4) has a bad bug that it works just
    on string prefixes, assuming that '/u' is a prefix of '/u2'.  This
    avoids that problem.
    """
    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError('%r is too short to calculate a relative path'
            % (base,))

    rp = abspath(path)

    s = []
    head = rp
    while True:
        if len(head) <= len(base) and head != base:
            raise errors.PathNotChild(rp, base)
        if head == base:
            break
        head, tail = split(head)
        if tail:
            s.append(tail)

    return pathjoin(*reversed(s))
def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in
    # the common case.
    rel = relpath(base, path)
    # '.' will have been turned into ''
    if not rel:
        return rel

    abs_base = abspath(base)
    current = abs_base
    _listdir = os.listdir

    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
        lbit = bit.lower()
        try:
            next_entries = _listdir(current)
        except OSError: # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            # remaining bits as they were specified.
            current = pathjoin(current, bit, *list(bit_iter))
            break
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
                break
        else:
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
            break
    return current[len(abs_base):].lstrip('/')
# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there. For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
else:
    canonical_relpath = relpath


def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]
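# Example behaviour (illustrative sketch, not part of the original module): on
# a case-insensitive, case-preserving file-system that actually stores 'README'
# inside base, a differently-cased request comes back in the stored case:
#   >>> canonical_relpath('/some/base', '/some/base/readme')
#   'README'
# On platforms where canonical_relpath is plain relpath, the input case is
# returned unchanged.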
def safe_unicode(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string into unicode.

    If it is unicode, it is returned.
    Otherwise it is decoded from utf-8. If decoding fails, the exception is
    wrapped in a BzrBadParameterNotUnicode exception.
    """
    if isinstance(unicode_or_utf8_string, unicode):
        return unicode_or_utf8_string
    try:
        return unicode_or_utf8_string.decode('utf8')
    except UnicodeDecodeError:
        raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
def safe_utf8(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string to a utf8 string.

    If it is a str, it is returned.
    If it is Unicode, it is encoded into a utf-8 string.
    """
    if isinstance(unicode_or_utf8_string, str):
        # TODO: jam 20070209 This is overkill, and probably has an impact on
        #       performance if we are dealing with lots of apis that want a
        #       utf-8 string
        try:
            # Make sure it is a valid utf-8 string
            unicode_or_utf8_string.decode('utf-8')
        except UnicodeDecodeError:
            raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
        return unicode_or_utf8_string
    return unicode_or_utf8_string.encode('utf-8')
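# Example behaviour (illustrative sketch, not part of the original module):
#   >>> safe_utf8(u'caf\xe9')
#   'caf\xc3\xa9'
#   >>> safe_unicode('caf\xc3\xa9')
#   u'caf\xe9'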
_revision_id_warning = ('Unicode revision ids were deprecated in bzr 0.15.'
                        ' Revision id generators should be creating utf8'
                        ' revision ids.')


def safe_revision_id(unicode_or_utf8_string, warn=True):
    """Revision ids should now be utf8, but at one point they were unicode.

    :param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
        utf8 or None).
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 revision id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
    if warn:
        symbol_versioning.warn(_revision_id_warning, DeprecationWarning,
                               stacklevel=2)
    return cache_utf8.encode(unicode_or_utf8_string)


_file_id_warning = ('Unicode file ids were deprecated in bzr 0.15. File id'
                    ' generators should be creating utf8 file ids.')


def safe_file_id(unicode_or_utf8_string, warn=True):
    """File ids should now be utf8, but at one point they were unicode.

    This is the same as safe_utf8, except it uses the cached encode functions
    to save a little bit of performance.

    :param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
        utf8 or None).
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 file id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
    if warn:
        symbol_versioning.warn(_file_id_warning, DeprecationWarning,
                               stacklevel=2)
    return cache_utf8.encode(unicode_or_utf8_string)
_platform_normalizes_filenames = False
if sys.platform == 'darwin':
    _platform_normalizes_filenames = True

normalized_filename = _inaccessible_normalized_filename


def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`). May be ignored if the feature is not available on this
        platform or Python version.
    """
    old_handler = signal.signal(signum, handler)
    if restart_syscall:
        try:
            siginterrupt = signal.siginterrupt
        except AttributeError:
            # siginterrupt doesn't exist on this platform, or for this version
            # of Python.
            pass
        else:
            siginterrupt(signum, False)
    return old_handler
default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""


def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    The rules are:
    - if BZR_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - if COLUMNS is set, returns its value,

    From there, we need to query the OS to get the size of the controlling
    terminal.

    Unices:
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    Windows:
    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)
    """
    # If BZR_COLUMNS is set, take it, user is always right
    try:
        return int(os.environ['BZR_COLUMNS'])
    except (KeyError, ValueError):
        pass

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BZR_COLUMNS is the recommended way to override.
        return None

    # If COLUMNS is set, take it, the terminal knows better (even inside a
    # given terminal, the application can decide to set COLUMNS to a lower
    # value (split screen) or a bigger value (scroll bars))
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):
        pass

    width, height = _terminal_size(None, None)
    if width <= 0:
        # Consider invalid values as meaning no width
        return None

    return width
def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(defaultx=width, defaulty=height)
    return width, height


def _ioctl_terminal_size(width, height):
    try:
        import struct, fcntl, termios
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
        pass
    return width, height
_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the
controlling terminal. If any error occurs, the provided default values should
be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
else:
    _terminal_size = _ioctl_terminal_size


def _terminal_size_changed(signum, frame):
    """Set COLUMNS upon receiving a SIGnal for WINdow size CHange."""
    width, height = _terminal_size(None, None)
    if width is not None:
        os.environ['COLUMNS'] = str(width)


_registered_sigwinch = False

def watch_sigwinch():
    """Register for SIGWINCH, once and only once."""
    global _registered_sigwinch
    if not _registered_sigwinch:
        if sys.platform == 'win32':
            # Martin (gz) mentioned WINDOW_BUFFER_SIZE_RECORD from
            # ReadConsoleInput but I've no idea how to plug that in
            # the current design -- vila 20091216
            pass
        else:
            set_signal_handler(signal.SIGWINCH, _terminal_size_changed)
        _registered_sigwinch = True
def supports_executable():
    return sys.platform != "win32"


def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted. The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"


def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable."""
def check_legal_path(path):
    """Check whether the supplied path is legal.
    This is only required on Windows, so we don't test on other platforms
    right now.
    """
    if sys.platform != "win32":
        return
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)


_WIN32_ERROR_DIRECTORY = 267 # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR
        or (sys.platform == 'win32'
            and (en == _WIN32_ERROR_DIRECTORY
                 or (en == errno.EINVAL
                     and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
        ))):
        return True
    return False
def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:
    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
     - directory-relpath is the relative path of the directory being returned
       with respect to top. prefix is prepended to this.
     - directory-path-from-root is the path including top for this directory.
       It is suitable for use with os functions.
     - relpath is the relative path within the subtree being walked.
     - basename is the basename of the path
     - kind is the kind of the object on disk; the object may no longer be
       present within the tree - but it may be recorded as versioned. See
       file_kind().
     - lstat is the stat data *if* the file was statted.
     - planned, not implemented:
       path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
        rooted higher up.
    :return: an iterator over the dirs.
    """
    #TODO there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _lstat = os.lstat
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
    while pending:
        # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
        relroot, _, _, _, top = pending.pop()
        if relroot:
            relprefix = relroot + u'/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        try:
            names = sorted(_listdir(top))
        except OSError, e:
            if not _is_error_enotdir(e):
                raise
        else:
            for name in names:
                abspath = top_slash + name
                statvalue = _lstat(abspath)
                kind = _kind_from_mode(statvalue.st_mode)
                append((relprefix + name, name, kind, statvalue, abspath))
        yield (relroot, top), dirblock

        # push the user specified dirs from dirblock
        pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
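# Usage sketch (illustrative, not part of the original module): walk a tree and
# prune any directory named '.bzr' by mutating the yielded dirblock in place,
# which stops walkdirs() from descending into it.
#   >>> for (dir_relpath, dir_path), entries in walkdirs('/some/tree'):
#   ...     entries[:] = [e for e in entries
#   ...                   if not (e[2] == 'directory' and e[1] == '.bzr')]
#   ...     for relpath, name, kind, st, abspath in entries:
#   ...         pass  # process each entry here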
class DirReader(object):
    """An interface for reading directories."""

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """Converts top and prefix to a starting dir entry

        :param top: A utf8 path
        :param prefix: An optional utf8 path to prefix output relative paths
            with.
        :return: A tuple starting with prefix, and ending with the native
            encoding of top.
        """
        raise NotImplementedError(self.top_prefix_to_starting_dir)

    def read_dir(self, prefix, top):
        """Read a specific dir.

        :param prefix: A utf8 prefix to be prepended to the path basenames.
        :param top: A natively encoded path to read.
        :return: A list of the directories contents. Each item contains:
            (utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
        """
        raise NotImplementedError(self.read_dir)
_selected_dir_reader = None


def _walkdirs_utf8(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields the same information as walkdirs() only each entry is yielded
    in utf-8. On platforms which have a filesystem encoding of utf8 the paths
    are returned as exact byte-strings.

    :return: yields a tuple of (dir_info, [file_info])
        dir_info is (utf8_relpath, path-from-top)
        file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
        if top is an absolute path, path-from-top is also an absolute path.
        path-from-top might be unicode or utf8, but it is the correct path to
        pass to os functions to affect the file in question. (such as os.lstat)
    """
    global _selected_dir_reader
    if _selected_dir_reader is None:
        fs_encoding = _fs_enc.upper()
        if sys.platform == "win32" and win32utils.winver == 'Windows NT':
            # Win98 doesn't have unicode apis like FindFirstFileW
            # TODO: We possibly could support Win98 by falling back to the
            #       original FindFirstFile, and using TCHAR instead of WCHAR,
            #       but that gets a bit tricky, and requires custom compiling
            #       for win98 anyway.
            try:
                from bzrlib._walkdirs_win32 import Win32ReadDir
                _selected_dir_reader = Win32ReadDir()
            except ImportError:
                pass
        elif fs_encoding in ('UTF-8', 'US-ASCII', 'ANSI_X3.4-1968'):
            # ANSI_X3.4-1968 is a form of ASCII
            try:
                from bzrlib._readdir_pyx import UTF8DirReader
                _selected_dir_reader = UTF8DirReader()
            except ImportError, e:
                failed_to_load_extension(e)

    if _selected_dir_reader is None:
        # Fallback to the python version
        _selected_dir_reader = UnicodeDirReader()

    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    # But we don't actually use 1-3 in pending, so set them to None
    pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
    read_dir = _selected_dir_reader.read_dir
    _directory = _directory_kind
    while pending:
        relroot, _, _, _, top = pending[-1].pop()
        if not pending[-1]:
            pending.pop()
        dirblock = sorted(read_dir(relroot, top))
        yield (relroot, top), dirblock
        # push the user specified dirs from dirblock
        next = [d for d in reversed(dirblock) if d[2] == _directory]
        if next:
            pending.append(next)
class UnicodeDirReader(DirReader):
    """A dir reader for non-utf8 file systems, which transcodes."""

    __slots__ = ['_utf8_encode']

    def __init__(self):
        self._utf8_encode = codecs.getencoder('utf8')

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """See DirReader.top_prefix_to_starting_dir."""
        return (safe_utf8(prefix), None, None, None, safe_unicode(top))

    def read_dir(self, prefix, top):
        """Read a single directory from a non-utf8 file system.

        top, and the abspath element in the output are unicode, all other paths
        are utf8. Local disk IO is done via unicode calls to listdir etc.

        This is currently the fallback code path when the filesystem encoding is
        not UTF-8. It may be better to implement an alternative so that we can
        safely handle paths that are not properly decodable in the current
        encoding.

        See DirReader.read_dir for details.
        """
        _utf8_encode = self._utf8_encode
        _lstat = os.lstat
        _listdir = os.listdir
        _kind_from_mode = file_kind_from_stat_mode

        if prefix:
            relprefix = prefix + '/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        for name in sorted(_listdir(top)):
            try:
                name_utf8 = _utf8_encode(name)[0]
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    _utf8_encode(relprefix)[0] + name, _fs_enc)
            abspath = top_slash + name
            statvalue = _lstat(abspath)
            kind = _kind_from_mode(statvalue.st_mode)
            append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
        return dirblock
def copy_tree(from_path, to_path, handlers={}):
    """Copy all of the entries in from_path into to_path.

    :param from_path: The base directory to copy.
    :param to_path: The target directory. If it does not exist, it will
        be created.
    :param handlers: A dictionary of functions, which takes a source and
        destinations for files, directories, etc.
    """
                         ' doesn\'t support the locale set by $LANG (%s)\n'
                         " Continuing with ascii encoding.\n"
                         % (e, os.environ.get('LANG')))
        user_encoding = 'ascii'

    # Windows returns 'cp0' to indicate there is no code page. So we'll just
    # treat that as ASCII, and not support printing unicode characters to the
    # console.
    #
    # For python scripts run under vim, we get '', so also treat that as ASCII
    if user_encoding in (None, 'cp0', ''):
        user_encoding = 'ascii'
    else:
        # check that the encoding name is actually known to Python
        try:
            codecs.lookup(user_encoding)
        except LookupError:
            sys.stderr.write('bzr: warning:'
                             ' unknown encoding %s.'
                             ' Continuing with ascii encoding.\n'
                             % user_encoding
                            )
            user_encoding = 'ascii'

    # either way, whatever we get, cache it
    _cached_user_encoding = user_encoding

    return user_encoding
def get_host_name():
    """Return the current unicode host name.

    This is meant to be used in place of socket.gethostname() because that
    behaves inconsistently on different platforms.
    """
    if sys.platform == "win32":
        return win32utils.get_host_name()
    else:
        return socket.gethostname().decode(get_user_encoding())


# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
# data at once.
MAX_SOCKET_CHUNK = 64 * 1024


def read_bytes_from_socket(sock, report_activity=None,
        max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size of bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while 1:
        try:
            bytes = sock.recv(max_read_size)
        except socket.error, e:
            eno = e.args[0]
            if eno == getattr(errno, "WSAECONNRESET", errno.ECONNRESET):
                # The connection was closed by the other side. Callers expect
                # an empty string to signal end-of-stream.
                return ""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(bytes), 'read')
            return bytes
def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer. MSG_WAITALL is not available
    on all platforms, but this should work everywhere. This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = ''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == '':
            break # eof
        b += new
    return b
def send_all(sock, bytes, report_activity=None):
2018
"""Send all bytes on a socket.
2020
Breaks large blocks in smaller chunks to avoid buffering limitations on
2021
some platforms, and catches EINTR which may be thrown if the send is
2022
interrupted by a signal.
2024
This is preferred to socket.sendall(), because it avoids portability bugs
2025
and provides activity reporting.
2027
:param report_activity: Call this as bytes are read, see
2028
Transport._report_activity
2031
byte_count = len(bytes)
2032
while sent_total < byte_count:
2034
sent = sock.send(buffer(bytes, sent_total, MAX_SOCKET_CHUNK))
2035
except socket.error, e:
2036
if e.args[0] != errno.EINTR:
2040
report_activity(sent, 'write')
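# Usage sketch (illustrative, not part of the original module): push a small
# payload through a local socket pair with send_all() and read it back exactly
# with recv_all(). socket.socketpair() is POSIX-only.
#   >>> import socket
#   >>> a, b = socket.socketpair()
#   >>> payload = 'x' * 1000
#   >>> send_all(a, payload)
#   >>> recv_all(b, len(payload)) == payload
#   True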
def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced. But the file itself is not
    dereferenced.
    :param path: The original path. May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)


def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"
def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with bzrlib are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can be removed.
    """
    # Check package name is within bzrlib
    if package == "bzrlib":
        resource_relpath = resource_name
    elif package.startswith("bzrlib."):
        package = package[len("bzrlib."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in bzrlib' % package)

    # Map the resource to a file and read its contents
    base = dirname(bzrlib.__file__)
    if getattr(sys, 'frozen', None): # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    filename = pathjoin(base, resource_relpath)
    return open(filename, 'rU').read()
def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from bzrlib._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError, e:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from bzrlib._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)
file_kind_from_stat_mode = file_kind_from_stat_mode_thunk


def file_kind(f, _lstat=os.lstat):
    try:
        return file_kind_from_stat_mode(_lstat(f).st_mode)
    except OSError, e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise
def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur. This is typically only true for low-level operations
    like os.read. If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR. There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal). So this function can reduce the impact for IO that bzrlib
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError), e:
            if e.errno == errno.EINTR:
                continue
            raise
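# Usage sketch (illustrative, not part of the original module): retry a
# low-level read that may be interrupted by a signal. 'some_fd' is a
# placeholder for an already-open file descriptor.
#   >>> data = until_no_eintr(os.read, some_fd, 4096)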
def re_compile_checked(re_string, flags=0, where=""):
    """Return a compiled re, or raise a sensible error.

    This should only be used when compiling user-supplied REs.

    :param re_string: Text form of regular expression.
    :param flags: eg re.IGNORECASE
    :param where: Message explaining to the user the context where
        it occurred, eg 'log search filter'.
    """
    # from https://bugs.launchpad.net/bzr/+bug/251352
    try:
        re_obj = re.compile(re_string, flags)
        re_obj.search("")
        return re_obj
    except re.error, e:
        if where:
            where = ' in ' + where
        # despite the name 'error' is a type
        raise errors.BzrCommandError('Invalid regular expression%s: %r: %s'
            % (where, re_string, e))
if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty
        import termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch
if sys.platform == 'linux2':
    def _local_concurrency():
        concurrency = None
        prefix = 'processor'
        for line in file('/proc/cpuinfo', 'rb'):
            if line.startswith(prefix):
                concurrency = int(line[line.find(':')+1:]) + 1
        return concurrency
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform[0:7] == 'freebsd':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p',],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        # No platform-specific way to find out; fall back to the default.
        return None


_cached_local_concurrency = None


def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BZR_CONCURRENCY', None)
    if concurrency is None:
        try:
            concurrency = _local_concurrency()
        except (OSError, IOError):
            pass
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
        concurrency = 1
    if use_cache:
        _cached_local_concurrency = concurrency
    return concurrency
class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if type(object) is str:
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)
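# Usage sketch (illustrative, not part of the original module): wrap a byte
# stream so unicode text is encoded to UTF-8 while already-encoded str chunks
# pass through untouched.
#   >>> out = UnicodeOrBytesToBytesWriter(codecs.getencoder('utf-8'), sys.stdout)
#   >>> out.write(u'caf\xe9\n')    # unicode, gets encoded
#   >>> out.write('raw bytes\n')   # str, written as-is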
if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes. Deleting or renaming a closed file opened with this
        function is not blocking child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for flags for each modes.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else: # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)
else:
    open_file = open