    realpath as _nt_realpath,
    splitdrive as _nt_splitdrive,
from os import listdir
from shutil import copyfile
from stat import (S_ISREG, S_ISDIR, S_ISLNK, ST_MODE, ST_SIZE,
                  S_ISCHR, S_ISBLK, S_ISFIFO, S_ISSOCK)
from tempfile import (
from bzrlib.errors import (BzrError,
                           BzrBadParameterNotUnicode,
from bzrlib.symbol_versioning import (deprecated_function,
from bzrlib.trace import mutter
from bzrlib import symbol_versioning

# sha and md5 modules are deprecated in python2.6 but hashlib is available as
# of 2.5
if sys.version_info < (2, 5):
    import md5 as _mod_md5
    import sha as _mod_sha
# Cross platform wall-clock time functionality with decent resolution.
# On Linux ``time.clock`` returns only CPU time. On Windows, ``time.time()``
# only has a resolution of ~15ms. Note that ``time.clock()`` is not
# synchronized with ``time.time()``, this is only meant to be used to find
# delta times by subtracting from another call to this function.
timer_func = time.time
if sys.platform == 'win32':
    timer_func = time.clock
# On win32, O_BINARY is used to indicate the file should
# be opened in binary mode, rather than text mode.
# On other platforms, O_BINARY doesn't exist, because
# they always open in binary mode, so it is okay to
# OR with 0 on those platforms.
# O_NOINHERIT and O_TEXT exist only on win32 too.
O_BINARY = getattr(os, 'O_BINARY', 0)
O_TEXT = getattr(os, 'O_TEXT', 0)
O_NOINHERIT = getattr(os, 'O_NOINHERIT', 0)
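# Illustrative note (not part of the original source): because each flag falls
# back to 0 on platforms that lack it, they can be OR'ed into os.open() flags
# unconditionally. 'path' below is a hypothetical filename:
#
#   fd = os.open(path, os.O_RDONLY | O_BINARY | O_NOINHERIT)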
def get_unicode_argv():
    try:
        user_encoding = get_user_encoding()
        return [a.decode(user_encoding) for a in sys.argv[1:]]
    except UnicodeDecodeError:
        raise errors.BzrError(("Parameter '%r' is unsupported by the current "
def make_readonly(filename):
    """Make a filename read-only."""
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        mod = mod & 0777555
        os.chmod(filename, mod)
def make_writable(filename):
    mod = os.lstat(filename).st_mode
    if not stat.S_ISLNK(mod):
        mod = mod | 0200
        os.chmod(filename, mod)
def minimum_path_selection(paths):
    """Return the smallest subset of paths which are outside paths.

    :param paths: A container (and hence not None) of paths.
    :return: A set of paths sufficient to include everything in paths via
        is_inside, drawn from the paths parameter.
    """
    def sort_key(path):
        return path.split('/')
    sorted_paths = sorted(list(paths), key=sort_key)

    search_paths = [sorted_paths[0]]
    for path in sorted_paths[1:]:
        if not is_inside(search_paths[-1], path):
            # This path is unique, add it
            search_paths.append(path)

    return set(search_paths)
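# Illustrative example (not part of the original source): nested paths
# collapse to the outermost entries that appear in the input.
#
#   >>> sorted(minimum_path_selection(['a/b', 'a/b/c', 'd']))
#   ['a/b', 'd']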
    for dirname in dir_list:
        if is_inside(dirname, fname) or is_inside(fname, dirname):
def pumpfile(from_file, to_file, read_length=-1, buff_size=32768,
             report_activity=None, direction='read'):
    """Copy contents of one file to another.

    The read_length can either be -1 to read to end-of-file (EOF) or
    it can specify the maximum number of bytes to read.

    The buff_size represents the maximum size for each read operation
    performed on from_file.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    :param direction: Will be passed to report_activity

    :return: The number of bytes copied.
    """
    length = 0
    if read_length >= 0:
        # read specified number of bytes

        while read_length > 0:
            num_bytes_to_read = min(read_length, buff_size)

            block = from_file.read(num_bytes_to_read)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)

            actual_bytes_read = len(block)
            read_length -= actual_bytes_read
            length += actual_bytes_read
    else:
        # read to EOF
        while True:
            block = from_file.read(buff_size)
            if not block:
                # EOF reached
                break
            if report_activity is not None:
                report_activity(len(block), direction)
            to_file.write(block)
            length += len(block)
    return length
def pump_string_file(bytes, file_handle, segment_size=None):
    """Write bytes to file_handle in many smaller writes.

    :param bytes: The string to write.
    :param file_handle: The file to write to.
    """
    # Write data in chunks rather than all at once, because very large
    # writes fail on some platforms (e.g. Windows with SMB mounted
    # drives).
    if not segment_size:
        segment_size = 5242880 # 5MB
    segments = range(len(bytes) / segment_size + 1)
    write = file_handle.write
    for segment_index in segments:
        segment = buffer(bytes, segment_index * segment_size, segment_size)
        write(segment)
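# Illustrative usage (not part of the original source; 'data' and the output
# filename are hypothetical):
#
#   out = open('dump.bin', 'wb')
#   pump_string_file(data, out, segment_size=1024 * 1024)
#   out.close()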
def file_iterator(input_file, readsize=32768):
def local_time_offset(t=None):
    """Return offset of local zone from GMT, either at present or at time t."""
    if t is None:
        t = time.time()
    offset = datetime.fromtimestamp(t) - datetime.utcfromtimestamp(t)
    return offset.days * 86400 + offset.seconds


weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
_default_format_by_weekday_num = [wd + " %Y-%m-%d %H:%M:%S" for wd in weekdays]
def format_date(t, offset=0, timezone='original', date_fmt=None,
                show_offset=True):
    """Return a formatted date string.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_fmt = date_fmt.replace('%a', weekdays[tt[6]])
    date_str = time.strftime(date_fmt, tt)
    return date_str + offset_str
# Cache of formatted offset strings
_offset_cache = {}


def format_date_with_offset_in_original_timezone(t, offset=0,
    _cache=_offset_cache):
    """Return a formatted date string in the original timezone.

    This routine may be faster than format_date.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    """
    tt = time.gmtime(t + offset)
    date_fmt = _default_format_by_weekday_num[tt[6]]
    date_str = time.strftime(date_fmt, tt)
    offset_str = _cache.get(offset, None)
    if offset_str is None:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
        _cache[offset] = offset_str
    return date_str + offset_str
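# Worked example (not part of the original source): an offset of 19800 seconds
# (5.5 hours east of UTC) renders as ' +0530'.
#
#   >>> ' %+03d%02d' % (19800 / 3600, (19800 / 60) % 60)
#   ' +0530'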
def format_local_date(t, offset=0, timezone='original', date_fmt=None,
                      show_offset=True):
    """Return a unicode date string formatted according to the current locale.

    :param t: Seconds since the epoch.
    :param offset: Timezone offset in seconds east of utc.
    :param timezone: How to display the time: 'utc', 'original' for the
        timezone specified by offset, or 'local' for the process's current
        timezone.
    :param date_fmt: strftime format.
    :param show_offset: Whether to append the timezone.
    """
    (date_fmt, tt, offset_str) = \
        _format_date(t, offset, timezone, date_fmt, show_offset)
    date_str = time.strftime(date_fmt, tt)
    if not isinstance(date_str, unicode):
        date_str = date_str.decode(get_user_encoding(), 'replace')
    return date_str + offset_str
def _format_date(t, offset, timezone, date_fmt, show_offset):
    if timezone == 'utc':
        tt = time.gmtime(t)
        offset = 0
    elif timezone == 'original':
        if offset is None:
            offset = 0
        tt = time.gmtime(t + offset)
    elif timezone == 'local':
        tt = time.localtime(t)
        offset = local_time_offset(t)
    else:
        raise errors.UnsupportedTimezoneFormat(timezone)
    if date_fmt is None:
        date_fmt = "%a %Y-%m-%d %H:%M:%S"
    if show_offset:
        offset_str = ' %+03d%02d' % (offset / 3600, (offset / 60) % 60)
    else:
        offset_str = ''
    return (date_fmt, tt, offset_str)
def compact_date(when):
    return time.strftime('%Y%m%d%H%M%S', time.gmtime(when))
def format_delta(delta):
    """Get a nice looking string for a time delta.

    :param delta: The time difference in seconds, can be positive or negative.
        positive indicates time in the past, negative indicates time in the
        future. (usually time.time() - stored_time)
    :return: String formatted to show approximate resolution
    """
    delta = int(delta)
    if delta >= 0:
        direction = 'ago'
    else:
        direction = 'in the future'
        delta = -delta

    seconds = delta
    if seconds < 90: # print seconds up to 90 seconds
        if seconds == 1:
            return '%d second %s' % (seconds, direction,)
        else:
            return '%d seconds %s' % (seconds, direction)

    minutes = int(seconds / 60)
    seconds -= 60 * minutes
    if seconds == 1:
        plural_seconds = ''
    else:
        plural_seconds = 's'
    if minutes < 90: # print minutes, seconds up to 90 minutes
        if minutes == 1:
            return '%d minute, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)
        else:
            return '%d minutes, %d second%s %s' % (
                minutes, seconds, plural_seconds, direction)

    hours = int(minutes / 60)
    minutes -= 60 * hours
    if minutes == 1:
        plural_minutes = ''
    else:
        plural_minutes = 's'
    if hours == 1:
        return '%d hour, %d minute%s %s' % (hours, minutes,
                                            plural_minutes, direction)
    return '%d hours, %d minute%s %s' % (hours, minutes,
                                         plural_minutes, direction)
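# Illustrative examples (not part of the original source):
#
#   >>> format_delta(30)
#   '30 seconds ago'
#   >>> format_delta(45 * 60)
#   '45 minutes, 0 seconds ago'
#   >>> format_delta(-7200)
#   '2 hours, 0 minutes in the future'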
    """Return size of given open file."""
            raise errors.BzrError("sorry, %r not allowed in path" % f)
        elif (f == '.') or (f == ''):


def joinpath(p):
    for f in p:
        if (f == '..') or (f is None) or (f == ''):
            raise errors.BzrError("sorry, %r not allowed in path" % f)
    return pathjoin(*p)
@deprecated_function(zero_nine)
def appendpath(p1, p2):
    return pathjoin(p1, p2)
def parent_directories(filename):
    """Return the list of parent directories, deepest first.

    For example, parent_directories("a/b/c") -> ["a/b", "a"].
    """
    parents = []
    parts = splitpath(dirname(filename))
    while parts:
        parents.append(joinpath(parts))
        parts.pop()
    return parents
_extension_load_failures = []


def failed_to_load_extension(exception):
    """Handle failing to load a binary extension.

    This should be called from the ImportError block guarding the attempt to
    import the native extension. If this function returns, the pure-Python
    implementation should be loaded instead::

    >>> try:
    >>>     import bzrlib._fictional_extension_pyx
    >>> except ImportError, e:
    >>>     bzrlib.osutils.failed_to_load_extension(e)
    >>>     import bzrlib._fictional_extension_py
    """
    # NB: This docstring is just an example, not a doctest, because doctest
    # currently can't cope with the use of lazy imports in this namespace --
    # This currently doesn't report the failure at the time it occurs, because
    # they tend to happen very early in startup when we can't check config
    # files etc, and also we want to report all failures but not spam the user
    from bzrlib import trace
    exception_str = str(exception)
    if exception_str not in _extension_load_failures:
        trace.mutter("failed to load compiled extension: %s" % exception_str)
        _extension_load_failures.append(exception_str)
def report_extension_load_failures():
    if not _extension_load_failures:
        return
    from bzrlib.config import GlobalConfig
    if GlobalConfig().get_user_option_as_bool('ignore_missing_extensions'):
        return
    # the warnings framework should by default show this only once
    from bzrlib.trace import warning
    warning(
        "bzr: warning: some compiled extensions could not be loaded; "
        "see <https://answers.launchpad.net/bzr/+faq/703>")
    # we no longer show the specific missing extensions here, because it makes
    # the message too long and scary - see
    # https://bugs.launchpad.net/bzr/+bug/430529
try:
    from bzrlib._chunks_to_lines_pyx import chunks_to_lines
except ImportError, e:
    failed_to_load_extension(e)
    from bzrlib._chunks_to_lines_py import chunks_to_lines
def split_lines(s):
    """Split s into lines, but without removing the newline characters."""
    # Trivially convert a fulltext into a 'chunked' representation, and let
    # chunks_to_lines do the heavy lifting.
    if isinstance(s, str):
        # chunks_to_lines only supports 8-bit strings
        return chunks_to_lines([s])
    else:
        return _split_lines(s)


def _split_lines(s):
    """Split s into lines, but without removing the newline characters.

    This supports Unicode or plain string objects.
    """
    lines = s.split('\n')
    result = [line + '\n' for line in lines[:-1]]
    if lines[-1]:
        result.append(lines[-1])
    return result
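# Illustrative example (not part of the original source): trailing text with
# no final newline is kept as an unterminated last line.
#
#   >>> split_lines('foo\nbar\nbaz')
#   ['foo\n', 'bar\n', 'baz']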
    avoids that problem.
    """

    if len(base) < MIN_ABS_PATHLENGTH:
        # must have space for e.g. a drive letter
        raise ValueError('%r is too short to calculate a relative path'
                         % (base,))

    rp = abspath(path)

    s = []
    head = rp
    while True:
        if len(head) <= len(base) and head != base:
            raise errors.PathNotChild(rp, base)
        if head == base:
            break
        head, tail = split(head)
        if tail:
            s.append(tail)

    return pathjoin(*reversed(s))
def _cicp_canonical_relpath(base, path):
    """Return the canonical path relative to base.

    Like relpath, but on case-insensitive-case-preserving file-systems, this
    will return the relpath as stored on the file-system rather than in the
    case specified in the input string, for all existing portions of the path.

    This will cause O(N) behaviour if called for every path in a tree; if you
    have a number of paths to convert, you should use canonical_relpaths().
    """
    # TODO: it should be possible to optimize this for Windows by using the
    # win32 API FindFiles function to look for the specified name - but using
    # os.listdir() still gives us the correct, platform agnostic semantics in
    # the common case.
    rel = relpath(base, path)
    # '.' will have been turned into ''
    if not rel:
        return rel

    abs_base = abspath(base)
    current = abs_base
    _listdir = os.listdir

    # use an explicit iterator so we can easily consume the rest on early exit.
    bit_iter = iter(rel.split('/'))
    for bit in bit_iter:
        lbit = bit.lower()
        try:
            next_entries = _listdir(current)
        except OSError: # enoent, eperm, etc
            # We can't find this in the filesystem, so just append the
            # remaining bits as they were specified.
            current = pathjoin(current, bit, *list(bit_iter))
            break
        for look in next_entries:
            if lbit == look.lower():
                current = pathjoin(current, look)
                break
        else:
            # got to the end, nothing matched, so we just return the
            # non-existing bits as they were specified (the filename may be
            # the target of a move, for example).
            current = pathjoin(current, bit, *list(bit_iter))
            break
    return current[len(abs_base):].lstrip('/')
# XXX - TODO - we need better detection/integration of case-insensitive
# file-systems; Linux often sees FAT32 devices (or NFS-mounted OSX
# filesystems), for example, so could probably benefit from the same basic
# support there. For now though, only Windows and OSX get that support, and
# they get it for *all* file-systems!
if sys.platform in ('win32', 'darwin'):
    canonical_relpath = _cicp_canonical_relpath
else:
    canonical_relpath = relpath
def canonical_relpaths(base, paths):
    """Create an iterable to canonicalize a sequence of relative paths.

    The intent is for this implementation to use a cache, vastly speeding
    up multiple transformations in the same directory.
    """
    # but for now, we haven't optimized...
    return [canonical_relpath(base, p) for p in paths]
def safe_unicode(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string into unicode.

    If it is unicode, it is returned.
    Otherwise it is decoded from utf-8. If decoding fails, the exception is
    wrapped in a BzrBadParameterNotUnicode exception.
    """
    if isinstance(unicode_or_utf8_string, unicode):
        return unicode_or_utf8_string
    try:
        return unicode_or_utf8_string.decode('utf8')
    except UnicodeDecodeError:
        raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
def safe_utf8(unicode_or_utf8_string):
    """Coerce unicode_or_utf8_string to a utf8 string.

    If it is a str, it is returned.
    If it is Unicode, it is encoded into a utf-8 string.
    """
    if isinstance(unicode_or_utf8_string, str):
        # TODO: jam 20070209 This is overkill, and probably has an impact on
        #       performance if we are dealing with lots of apis that want a
        try:
            # Make sure it is a valid utf-8 string
            unicode_or_utf8_string.decode('utf-8')
        except UnicodeDecodeError:
            raise errors.BzrBadParameterNotUnicode(unicode_or_utf8_string)
        return unicode_or_utf8_string
    return unicode_or_utf8_string.encode('utf-8')
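# Illustrative examples (not part of the original source): both helpers accept
# either representation and converge on one type.
#
#   >>> safe_unicode('caf\xc3\xa9')
#   u'caf\xe9'
#   >>> safe_utf8(u'caf\xe9')
#   'caf\xc3\xa9'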
_revision_id_warning = ('Unicode revision ids were deprecated in bzr 0.15.'
                        ' Revision id generators should be creating utf8'
                        ' revision ids.')


def safe_revision_id(unicode_or_utf8_string, warn=True):
    """Revision ids should now be utf8, but at one point they were unicode.

    :param unicode_or_utf8_string: A possibly Unicode revision_id. (can also be
        utf8 or None).
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 revision id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
    if warn:
        symbol_versioning.warn(_revision_id_warning, DeprecationWarning,
                               stacklevel=2)
    return cache_utf8.encode(unicode_or_utf8_string)
_file_id_warning = ('Unicode file ids were deprecated in bzr 0.15. File id'
                    ' generators should be creating utf8 file ids.')


def safe_file_id(unicode_or_utf8_string, warn=True):
    """File ids should now be utf8, but at one point they were unicode.

    This is the same as safe_utf8, except it uses the cached encode functions
    to save a little bit of performance.

    :param unicode_or_utf8_string: A possibly Unicode file_id. (can also be
        utf8 or None).
    :param warn: Functions that are sanitizing user data can set warn=False
    :return: None or a utf8 file id.
    """
    if (unicode_or_utf8_string is None
        or unicode_or_utf8_string.__class__ == str):
        return unicode_or_utf8_string
    if warn:
        symbol_versioning.warn(_file_id_warning, DeprecationWarning,
                               stacklevel=2)
    return cache_utf8.encode(unicode_or_utf8_string)
_platform_normalizes_filenames = False

normalized_filename = _inaccessible_normalized_filename
def set_signal_handler(signum, handler, restart_syscall=True):
    """A wrapper for signal.signal that also calls siginterrupt(signum, False)
    on platforms that support that.

    :param restart_syscall: if set, allow syscalls interrupted by a signal to
        automatically restart (by calling `signal.siginterrupt(signum,
        False)`). May be ignored if the feature is not available on this
        platform or Python version.
    """
    try:
        import signal
        siginterrupt = signal.siginterrupt
    except ImportError:
        # This python implementation doesn't provide signal support, hence no
        # handler exists
        return None
    except AttributeError:
        # siginterrupt doesn't exist on this platform, or for this version
        # of Python.
        siginterrupt = lambda signum, flag: None
    if restart_syscall:
        def sig_handler(*args):
            # Python resets the siginterrupt flag when a signal is
            # received. <http://bugs.python.org/issue8354>
            # As a workaround for some cases, set it back the way we want it.
            siginterrupt(signum, False)
            # Now run the handler function passed to set_signal_handler.
            handler(*args)
    else:
        sig_handler = handler
    old_handler = signal.signal(signum, sig_handler)
    if restart_syscall:
        siginterrupt(signum, False)
    return old_handler
default_terminal_width = 80
"""The default terminal width for ttys.

This is defined so that higher levels can share a common fallback value when
terminal_width() returns None.
"""
def terminal_width():
    """Return terminal width.

    None is returned if the width can't be established precisely.

    The rules are:
    - if BZR_COLUMNS is set, returns its value
    - if there is no controlling terminal, returns None
    - if COLUMNS is set, returns its value,

    From there, we need to query the OS to get the size of the controlling
    terminal.

    Unices:
    - get termios.TIOCGWINSZ
    - if an error occurs or a negative value is obtained, returns None

    Windows:
    - win32utils.get_console_size() decides,
    - returns None on error (provided default value)
    """

    # If BZR_COLUMNS is set, take it, user is always right
    try:
        return int(os.environ['BZR_COLUMNS'])
    except (KeyError, ValueError):
        pass

    isatty = getattr(sys.stdout, 'isatty', None)
    if isatty is None or not isatty():
        # Don't guess, setting BZR_COLUMNS is the recommended way to override.
        return None

    # If COLUMNS is set, take it, the terminal knows better (even inside a
    # given terminal, the application can decide to set COLUMNS to a lower
    # value (split screen) or a bigger value (scroll bars))
    try:
        return int(os.environ['COLUMNS'])
    except (KeyError, ValueError):
        pass

    width, height = _terminal_size(None, None)
    if width <= 0:
        # Consider invalid values as meaning no width
        return None

    return width


def _win32_terminal_size(width, height):
    width, height = win32utils.get_console_size(defaultx=width, defaulty=height)
    return width, height
def _ioctl_terminal_size(width, height):
    try:
        import struct, fcntl, termios
        s = struct.pack('HHHH', 0, 0, 0, 0)
        x = fcntl.ioctl(1, termios.TIOCGWINSZ, s)
        height, width = struct.unpack('HHHH', x)[0:2]
    except (IOError, AttributeError):
        pass
    return width, height
_terminal_size = None
"""Returns the terminal size as (width, height).

:param width: Default value for width.
:param height: Default value for height.

This is defined specifically for each OS and queries the size of the controlling
terminal. If any error occurs, the provided default values should be returned.
"""
if sys.platform == 'win32':
    _terminal_size = _win32_terminal_size
else:
    _terminal_size = _ioctl_terminal_size
def _terminal_size_changed(signum, frame):
    """Set COLUMNS upon receiving a SIGnal for WINdow size CHange."""
    width, height = _terminal_size(None, None)
    if width is not None:
        os.environ['COLUMNS'] = str(width)


_registered_sigwinch = False

def watch_sigwinch():
    """Register for SIGWINCH, once and only once.

    Do nothing if the signal module is not available.
    """
    global _registered_sigwinch
    if not _registered_sigwinch:
        try:
            import signal
            if getattr(signal, "SIGWINCH", None) is not None:
                set_signal_handler(signal.SIGWINCH, _terminal_size_changed)
        except ImportError:
            # python doesn't provide signal support, nothing we can do about it
            pass
        _registered_sigwinch = True
def supports_executable():
    return sys.platform != "win32"


def supports_posix_readonly():
    """Return True if 'readonly' has POSIX semantics, False otherwise.

    Notably, a win32 readonly file cannot be deleted, unlike POSIX where the
    directory controls creation/deletion, etc.

    And under win32, readonly means that the directory itself cannot be
    deleted. The contents of a readonly directory can be changed, unlike POSIX
    where files in readonly directories cannot be added, deleted or renamed.
    """
    return sys.platform != "win32"
def set_or_unset_env(env_variable, value):
    """Modify the environment, setting or removing the env_variable.
def check_legal_path(path):
    """Check whether the supplied path is legal.
    This is only required on Windows, so we don't test on other platforms
    right now.
    """
    if sys.platform != "win32":
        return
    if _validWin32PathRE.match(path) is None:
        raise errors.IllegalPath(path)
_WIN32_ERROR_DIRECTORY = 267 # Similar to errno.ENOTDIR


def _is_error_enotdir(e):
    """Check if this exception represents ENOTDIR.

    Unfortunately, python is very inconsistent about the exception
    here. The cases are:
      1) Linux, Mac OSX all versions seem to set errno == ENOTDIR
      2) Windows, Python2.4, uses errno == ERROR_DIRECTORY (267)
         which is the windows error code.
      3) Windows, Python2.5 uses errno == EINVAL and
         winerror == ERROR_DIRECTORY

    :param e: An Exception object (expected to be OSError with an errno
        attribute, but we should be able to cope with anything)
    :return: True if this represents an ENOTDIR error. False otherwise.
    """
    en = getattr(e, 'errno', None)
    if (en == errno.ENOTDIR
        or (sys.platform == 'win32'
            and (en == _WIN32_ERROR_DIRECTORY
                 or (en == errno.EINVAL
                     and getattr(e, 'winerror', None) == _WIN32_ERROR_DIRECTORY)
        ))):
        return True
    return False
def walkdirs(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields all the data about the contents of a directory at a time.
    After each directory has been yielded, if the caller has mutated the list
    to exclude some directories, they are then not descended into.

    The data yielded is of the form:

    ((directory-relpath, directory-path-from-top),
    [(relpath, basename, kind, lstat, path-from-top), ...]),
     - directory-relpath is the relative path of the directory being returned
       with respect to top. prefix is prepended to this.
     - directory-path-from-root is the path including top for this directory.
       It is suitable for use with os functions.
     - relpath is the relative path within the subtree being walked.
     - basename is the basename of the path
     - kind is the kind of the file now. If unknown then the file is not
       present within the tree - but it may be recorded as versioned. See
     - lstat is the stat data *if* the file was statted.
     - planned, not implemented:
       path_from_tree_root is the path from the root of the tree.

    :param prefix: Prefix the relpaths that are yielded with 'prefix'. This
        allows one to walk a subtree but get paths that are relative to a tree
        rooted higher up.
    :return: an iterator over the dirs.
    """
    #TODO there is a bit of a smell where the results of the directory-
    # summary in this, and the path from the root, may not agree
    # depending on top and prefix - i.e. ./foo and foo as a pair leads to
    # potentially confusing output. We should make this more robust - but
    # not at a speed cost. RBC 20060731
    _lstat = os.lstat
    _directory = _directory_kind
    _listdir = os.listdir
    _kind_from_mode = file_kind_from_stat_mode
    pending = [(safe_unicode(prefix), "", _directory, None, safe_unicode(top))]
    while pending:
        # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
        relroot, _, _, _, top = pending.pop()
        if relroot:
            relprefix = relroot + u'/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        try:
            names = sorted(_listdir(top))
        except OSError, e:
            if not _is_error_enotdir(e):
                raise
        else:
            for name in names:
                abspath = top_slash + name
                statvalue = _lstat(abspath)
                kind = _kind_from_mode(statvalue.st_mode)
                append((relprefix + name, name, kind, statvalue, abspath))
        yield (relroot, top), dirblock

        # push the user specified dirs from dirblock
        pending.extend(d for d in reversed(dirblock) if d[2] == _directory)
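# Illustrative usage (not part of the original source): because the yielded
# dirblock is the same list used to schedule recursion, pruning it in place
# stops the walk from descending into those directories.
#
#   for (dir_relpath, dir_abspath), entries in walkdirs('.'):
#       entries[:] = [e for e in entries if e[1] != '.bzr']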
class DirReader(object):
    """An interface for reading directories."""

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """Converts top and prefix to a starting dir entry

        :param top: A utf8 path
        :param prefix: An optional utf8 path to prefix output relative paths
            with.
        :return: A tuple starting with prefix, and ending with the native
            encoding of top.
        """
        raise NotImplementedError(self.top_prefix_to_starting_dir)

    def read_dir(self, prefix, top):
        """Read a specific dir.

        :param prefix: A utf8 prefix to be prepended to the path basenames.
        :param top: A natively encoded path to read.
        :return: A list of the directories contents. Each item contains:
            (utf8_relpath, utf8_name, kind, lstatvalue, native_abspath)
        """
        raise NotImplementedError(self.read_dir)
_selected_dir_reader = None


def _walkdirs_utf8(top, prefix=""):
    """Yield data about all the directories in a tree.

    This yields the same information as walkdirs() only each entry is yielded
    in utf-8. On platforms which have a filesystem encoding of utf8 the paths
    are returned as exact byte-strings.

    :return: yields a tuple of (dir_info, [file_info])
        dir_info is (utf8_relpath, path-from-top)
        file_info is (utf8_relpath, utf8_name, kind, lstat, path-from-top)
        if top is an absolute path, path-from-top is also an absolute path.
        path-from-top might be unicode or utf8, but it is the correct path to
        pass to os functions to affect the file in question. (such as os.lstat)
    """
    global _selected_dir_reader
    if _selected_dir_reader is None:
        fs_encoding = _fs_enc.upper()
        if sys.platform == "win32" and win32utils.winver == 'Windows NT':
            # Win98 doesn't have unicode apis like FindFirstFileW
            # TODO: We possibly could support Win98 by falling back to the
            #       original FindFirstFile, and using TCHAR instead of WCHAR,
            #       but that gets a bit tricky, and requires custom compiling
            try:
                from bzrlib._walkdirs_win32 import Win32ReadDir
                _selected_dir_reader = Win32ReadDir()
            except ImportError:
                pass
        elif fs_encoding in ('UTF-8', 'US-ASCII', 'ANSI_X3.4-1968'):
            # ANSI_X3.4-1968 is a form of ASCII
            try:
                from bzrlib._readdir_pyx import UTF8DirReader
                _selected_dir_reader = UTF8DirReader()
            except ImportError, e:
                failed_to_load_extension(e)

    if _selected_dir_reader is None:
        # Fallback to the python version
        _selected_dir_reader = UnicodeDirReader()

    # 0 - relpath, 1- basename, 2- kind, 3- stat, 4-toppath
    # But we don't actually use 1-3 in pending, so set them to None
    pending = [[_selected_dir_reader.top_prefix_to_starting_dir(top, prefix)]]
    read_dir = _selected_dir_reader.read_dir
    _directory = _directory_kind
    while pending:
        relroot, _, _, _, top = pending[-1].pop()
        if not pending[-1]:
            pending.pop()
        dirblock = sorted(read_dir(relroot, top))
        yield (relroot, top), dirblock
        # push the user specified dirs from dirblock
        next = [d for d in reversed(dirblock) if d[2] == _directory]
        if next:
            pending.append(next)
class UnicodeDirReader(DirReader):
    """A dir reader for non-utf8 file systems, which transcodes."""

    __slots__ = ['_utf8_encode']

    def __init__(self):
        self._utf8_encode = codecs.getencoder('utf8')

    def top_prefix_to_starting_dir(self, top, prefix=""):
        """See DirReader.top_prefix_to_starting_dir."""
        return (safe_utf8(prefix), None, None, None, safe_unicode(top))

    def read_dir(self, prefix, top):
        """Read a single directory from a non-utf8 file system.

        top, and the abspath element in the output are unicode, all other paths
        are utf8. Local disk IO is done via unicode calls to listdir etc.

        This is currently the fallback code path when the filesystem encoding is
        not UTF-8. It may be better to implement an alternative so that we can
        safely handle paths that are not properly decodable in the current
        encoding.

        See DirReader.read_dir for details.
        """
        _utf8_encode = self._utf8_encode
        _lstat = os.lstat
        _listdir = os.listdir
        _kind_from_mode = file_kind_from_stat_mode

        if prefix:
            relprefix = prefix + '/'
        else:
            relprefix = ''
        top_slash = top + u'/'

        dirblock = []
        append = dirblock.append
        for name in sorted(_listdir(top)):
            try:
                name_utf8 = _utf8_encode(name)[0]
            except UnicodeDecodeError:
                raise errors.BadFilenameEncoding(
                    _utf8_encode(relprefix)[0] + name, _fs_enc)
            abspath = top_slash + name
            statvalue = _lstat(abspath)
            kind = _kind_from_mode(statvalue.st_mode)
            append((relprefix + name_utf8, name_utf8, kind, statvalue, abspath))
        return dirblock
def copy_tree(from_path, to_path, handlers={}):
    """Copy all of the entries in from_path into to_path.

    :param from_path: The base directory to copy.
    :param to_path: The target directory. If it does not exist, it will
        be created.
    :param handlers: A dictionary of functions, which takes a source and
    try:
        user_encoding = locale.getpreferredencoding()
    except locale.Error, e:
        sys.stderr.write('bzr: warning: %s\n'
                         ' Could not determine what text encoding to use.\n'
                         ' This error usually means your Python interpreter\n'
                         ' doesn\'t support the locale set by $LANG (%s)\n'
                         " Continuing with ascii encoding.\n"
                         % (e, os.environ.get('LANG')))
        user_encoding = 'ascii'

    # Windows returns 'cp0' to indicate there is no code page. So we'll just
    # treat that as ASCII, and not support printing unicode characters to the
    # console.
    #
    # For python scripts run under vim, we get '', so also treat that as ASCII
    if user_encoding in (None, 'cp0', ''):
        user_encoding = 'ascii'
    else:
        # check encoding
        try:
            codecs.lookup(user_encoding)
        except LookupError:
            sys.stderr.write('bzr: warning:'
                             ' unknown encoding %s.'
                             ' Continuing with ascii encoding.\n'
                             % user_encoding)
            user_encoding = 'ascii'

    _cached_user_encoding = user_encoding

    return user_encoding
def get_host_name():
    """Return the current unicode host name.

    This is meant to be used in place of socket.gethostname() because that
    behaves inconsistently on different platforms.
    """
    if sys.platform == "win32":
        import win32utils
        return win32utils.get_host_name()
    else:
        import socket
        return socket.gethostname().decode(get_user_encoding())
# We must not read/write any more than 64k at a time from/to a socket so we
# don't risk "no buffer space available" errors on some platforms. Windows in
# particular is likely to throw WSAECONNABORTED or WSAENOBUFS if given too much
# data at once.
MAX_SOCKET_CHUNK = 64 * 1024


def read_bytes_from_socket(sock, report_activity=None,
        max_read_size=MAX_SOCKET_CHUNK):
    """Read up to max_read_size of bytes from sock and notify of progress.

    Translates "Connection reset by peer" into file-like EOF (return an
    empty string rather than raise an error), and repeats the recv if
    interrupted by a signal.
    """
    while 1:
        try:
            bytes = sock.recv(max_read_size)
        except socket.error, e:
            eno = e.args[0]
            if eno == getattr(errno, "WSAECONNRESET", errno.ECONNRESET):
                # The connection was closed by the other side. Callers expect
                # an empty string to signal end-of-stream.
                return ""
            elif eno == errno.EINTR:
                # Retry the interrupted recv.
                continue
            raise
        else:
            if report_activity is not None:
                report_activity(len(bytes), 'read')
            return bytes
def recv_all(socket, count):
    """Receive an exact number of bytes.

    Regular Socket.recv() may return less than the requested number of bytes,
    depending on what's in the OS buffer. MSG_WAITALL is not available
    on all platforms, but this should work everywhere. This will return
    less than the requested amount if the remote end closes.

    This isn't optimized and is intended mostly for use in testing.
    """
    b = ''
    while len(b) < count:
        new = read_bytes_from_socket(socket, None, count - len(b))
        if new == '':
            break # eof
        b += new
    return b
def send_all(sock, bytes, report_activity=None):
    """Send all bytes on a socket.

    Breaks large blocks in smaller chunks to avoid buffering limitations on
    some platforms, and catches EINTR which may be thrown if the send is
    interrupted by a signal.

    This is preferred to socket.sendall(), because it avoids portability bugs
    and provides activity reporting.

    :param report_activity: Call this as bytes are read, see
        Transport._report_activity
    """
    sent_total = 0
    byte_count = len(bytes)
    while sent_total < byte_count:
        try:
            sent = sock.send(buffer(bytes, sent_total, MAX_SOCKET_CHUNK))
        except socket.error, e:
            if e.args[0] != errno.EINTR:
                raise
        else:
            sent_total += sent
            report_activity(sent, 'write')
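# Illustrative usage (not part of the original source; 'sock' is a connected
# socket and 'payload' a byte string):
#
#   send_all(sock, payload)
#   reply = recv_all(sock, 4)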
def dereference_path(path):
    """Determine the real path to a file.

    All parent elements are dereferenced. But the file itself is not
    dereferenced.
    :param path: The original path. May be absolute or relative.
    :return: the real path *to* the file
    """
    parent, base = os.path.split(path)
    # The pathjoin for '.' is a workaround for Python bug #1213894.
    # (initial path components aren't dereferenced)
    return pathjoin(realpath(pathjoin('.', parent)), base)
def supports_mapi():
    """Return True if we can use MAPI to launch a mail client."""
    return sys.platform == "win32"
def resource_string(package, resource_name):
    """Load a resource from a package and return it as a string.

    Note: Only packages that start with bzrlib are currently supported.

    This is designed to be a lightweight implementation of resource
    loading in a way which is API compatible with the same API from
    pkg_resources. See
    http://peak.telecommunity.com/DevCenter/PkgResources#basic-resource-access.
    If and when pkg_resources becomes a standard library, this routine
    can be written against that API.
    """
    # Check package name is within bzrlib
    if package == "bzrlib":
        resource_relpath = resource_name
    elif package.startswith("bzrlib."):
        package = package[len("bzrlib."):].replace('.', os.sep)
        resource_relpath = pathjoin(package, resource_name)
    else:
        raise errors.BzrError('resource package %s not in bzrlib' % package)

    # Map the resource to a file and read its contents
    base = dirname(bzrlib.__file__)
    if getattr(sys, 'frozen', None):    # bzr.exe
        base = abspath(pathjoin(base, '..', '..'))
    filename = pathjoin(base, resource_relpath)
    return open(filename, 'rU').read()
def file_kind_from_stat_mode_thunk(mode):
    global file_kind_from_stat_mode
    if file_kind_from_stat_mode is file_kind_from_stat_mode_thunk:
        try:
            from bzrlib._readdir_pyx import UTF8DirReader
            file_kind_from_stat_mode = UTF8DirReader().kind_from_mode
        except ImportError, e:
            # This is one time where we won't warn that an extension failed to
            # load. The extension is never available on Windows anyway.
            from bzrlib._readdir_py import (
                _kind_from_mode as file_kind_from_stat_mode
                )
    return file_kind_from_stat_mode(mode)
file_kind_from_stat_mode = file_kind_from_stat_mode_thunk
def file_kind(f, _lstat=os.lstat):
    try:
        return file_kind_from_stat_mode(_lstat(f).st_mode)
    except OSError, e:
        if getattr(e, 'errno', None) in (errno.ENOENT, errno.ENOTDIR):
            raise errors.NoSuchFile(f)
        raise
def until_no_eintr(f, *a, **kw):
    """Run f(*a, **kw), retrying if an EINTR error occurs.

    WARNING: you must be certain that it is safe to retry the call repeatedly
    if EINTR does occur. This is typically only true for low-level operations
    like os.read. If in any doubt, don't use this.

    Keep in mind that this is not a complete solution to EINTR. There is
    probably code in the Python standard library and other dependencies that
    may encounter EINTR if a signal arrives (and there is a signal handler for
    that signal). So this function can reduce the impact for IO that bzrlib
    directly controls, but it is not a complete solution.
    """
    # Borrowed from Twisted's twisted.python.util.untilConcludes function.
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError), e:
            if e.errno == errno.EINTR:
                continue
            raise
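# Illustrative usage (not part of the original source; 'fd' is an open file
# descriptor): retry a low-level read interrupted by a signal such as SIGWINCH.
#
#   data = until_no_eintr(os.read, fd, 4096)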
def re_compile_checked(re_string, flags=0, where=""):
    """Return a compiled re, or raise a sensible error.

    This should only be used when compiling user-supplied REs.

    :param re_string: Text form of regular expression.
    :param flags: eg re.IGNORECASE
    :param where: Message explaining to the user the context where
        it occurred, eg 'log search filter'.
    """
    # from https://bugs.launchpad.net/bzr/+bug/251352
    try:
        re_obj = re.compile(re_string, flags)
        return re_obj
    except re.error, e:
        if where:
            where = ' in ' + where
        # despite the name 'error' is a type
        raise errors.BzrCommandError('Invalid regular expression%s: %r: %s'
            % (where, re_string, e))
if sys.platform == "win32":
    def getchar():
        import msvcrt
        return msvcrt.getch()
else:
    def getchar():
        import tty, termios
        fd = sys.stdin.fileno()
        settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, settings)
        return ch
if sys.platform == 'linux2':
    def _local_concurrency():
        concurrency = None
        prefix = 'processor'
        for line in file('/proc/cpuinfo', 'rb'):
            if line.startswith(prefix):
                concurrency = int(line[line.find(':')+1:]) + 1
        return concurrency
elif sys.platform == 'darwin':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.availcpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform[0:7] == 'freebsd':
    def _local_concurrency():
        return subprocess.Popen(['sysctl', '-n', 'hw.ncpu'],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == 'sunos5':
    def _local_concurrency():
        return subprocess.Popen(['psrinfo', '-p',],
                                stdout=subprocess.PIPE).communicate()[0]
elif sys.platform == "win32":
    def _local_concurrency():
        # This appears to return the number of cores.
        return os.environ.get('NUMBER_OF_PROCESSORS')
else:
    def _local_concurrency():
        return None


_cached_local_concurrency = None

def local_concurrency(use_cache=True):
    """Return how many processes can be run concurrently.

    Rely on platform specific implementations and default to 1 (one) if
    anything goes wrong.
    """
    global _cached_local_concurrency

    if _cached_local_concurrency is not None and use_cache:
        return _cached_local_concurrency

    concurrency = os.environ.get('BZR_CONCURRENCY', None)
    if concurrency is None:
        try:
            concurrency = _local_concurrency()
        except (OSError, IOError):
            pass
    try:
        concurrency = int(concurrency)
    except (TypeError, ValueError):
        concurrency = 1
    if use_cache:
        _cached_local_concurrency = concurrency
    return concurrency
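# Illustrative behaviour (not part of the original source): BZR_CONCURRENCY
# overrides detection, and unparseable values fall back to 1.
#
#   >>> os.environ['BZR_CONCURRENCY'] = '4'
#   >>> local_concurrency(use_cache=False)
#   4
#   >>> os.environ['BZR_CONCURRENCY'] = 'not-a-number'
#   >>> local_concurrency(use_cache=False)
#   1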
class UnicodeOrBytesToBytesWriter(codecs.StreamWriter):
    """A stream writer that doesn't decode str arguments."""

    def __init__(self, encode, stream, errors='strict'):
        codecs.StreamWriter.__init__(self, stream, errors)
        self.encode = encode

    def write(self, object):
        if type(object) is str:
            self.stream.write(object)
        else:
            data, _ = self.encode(object, self.errors)
            self.stream.write(data)
if sys.platform == 'win32':
    def open_file(filename, mode='r', bufsize=-1):
        """This function is used to override the ``open`` builtin.

        But it uses O_NOINHERIT flag so the file handle is not inherited by
        child processes. Deleting or renaming a closed file opened with this
        function is not blocking child processes.
        """
        writing = 'w' in mode
        appending = 'a' in mode
        updating = '+' in mode
        binary = 'b' in mode

        flags = O_NOINHERIT
        # see http://msdn.microsoft.com/en-us/library/yeby3zcb%28VS.71%29.aspx
        # for flags for each modes.
        if binary:
            flags |= O_BINARY
        else:
            flags |= O_TEXT

        if writing:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_TRUNC
        elif appending:
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_WRONLY
            flags |= os.O_CREAT | os.O_APPEND
        else: # reading
            if updating:
                flags |= os.O_RDWR
            else:
                flags |= os.O_RDONLY

        return os.fdopen(os.open(filename, flags), mode, bufsize)