# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from stat import S_ISREG, S_IEXEC

from breezy import (
    config as _mod_config,
    )

lazy_import.lazy_import(globals(), """
from breezy import (
    revision as _mod_revision,
    )
from breezy.bzr import (
    )
from breezy.i18n import gettext
""")

from .errors import (DuplicateKey, MalformedTransform,
                     ReusingTransform, CantMoveRoot,
                     ImmortalLimbo, NoFinalPath,
                     )
from .filters import filtered_output_bytes, ContentFilterContext
from .mutabletree import MutableTree
from .osutils import (
    )
from .progress import ProgressPhase
ROOT_PARENT = "root-parent"


def unique_add(map, key, value):
    if key in map:
        raise DuplicateKey(key=key)
    map[key] = value


class _TransformResults(object):

    def __init__(self, modified_paths, rename_count):
        object.__init__(self)
        self.modified_paths = modified_paths
        self.rename_count = rename_count
        self.version_file(old_root_file_id, old_root)
        self.unversion_file(self._new_root)
    def fixup_new_roots(self):
        """Reinterpret requests to change the root directory

        Instead of creating a root directory, or moving an existing directory,
        all the attributes and children of the new root are applied to the
        existing root directory.

        This means that the old root trans-id becomes obsolete, so it is
        recommended only to invoke this after the root trans-id has become
        irrelevant.
        """
        new_roots = [k for k, v in viewitems(self._new_parent)
                     if v == ROOT_PARENT]
        if len(new_roots) < 1:
            return
        if len(new_roots) != 1:
            raise ValueError('A tree cannot have two roots!')
        if self._new_root is None:
            self._new_root = new_roots[0]
            return
        old_new_root = new_roots[0]
        # unversion the new root's directory.
        if self.final_kind(self._new_root) is None:
            file_id = self.final_file_id(old_new_root)
        else:
            file_id = self.final_file_id(self._new_root)
        if old_new_root in self._new_id:
            self.cancel_versioning(old_new_root)
        else:
            self.unversion_file(old_new_root)
        # if, at this stage, root still has an old file_id, zap it so we can
        # stick a new one in.
        if (self.tree_file_id(self._new_root) is not None and
            self._new_root not in self._removed_id):
            self.unversion_file(self._new_root)
        if file_id is not None:
            self.version_file(file_id, self._new_root)

        # Now move children of new root into old root directory.
        # Ensure all children are registered with the transaction, but don't
        # use directly-- some tree children have new parents
        list(self.iter_tree_children(old_new_root))
        # Move all children of new root into old root directory.
        for child in self.by_parent().get(old_new_root, []):
            self.adjust_path(self.final_name(child), self._new_root, child)

        # Ensure old_new_root has no directory.
        if old_new_root in self._new_contents:
            self.cancel_creation(old_new_root)
        else:
            self.delete_contents(old_new_root)

        # prevent deletion of root directory.
        if self._new_root in self._removed_contents:
            self.cancel_deletion(self._new_root)

        # destroy path info for old_new_root.
        del self._new_parent[old_new_root]
        del self._new_name[old_new_root]
    def trans_id_tree_file_id(self, inventory_id):
        """Determine the transaction id of a working tree file.

        This reflects only files that already exist, not ones that will be
        added by transactions.
        """
        if inventory_id is None:
            raise ValueError('None is not a valid file id')
        path = self._tree.id2path(inventory_id)
        return self.trans_id_tree_path(path)

    def trans_id_file_id(self, file_id):
        """Determine or set the transaction id associated with a file ID."""

        return sorted(FinalPaths(self).get_paths(new_ids))
    def _inventory_altered(self):
        """Determine which trans_ids need new Inventory entries.

        An new entry is needed when anything that would be reflected by an
        inventory entry changes, including file name, file_id, parent file_id,
        file kind, and the execute bit.

        Some care is taken to return entries with real changes, not cases
        where the value is deleted and then restored to its original value,
        but some actually unchanged values may be returned.

        :returns: A list of (path, trans_id) for all items requiring an
            inventory change. Ordered by path.
        """
        changed_ids = set()
        # Find entries whose file_ids are new (or changed).
        new_file_id = set(t for t in self._new_id
                          if self._new_id[t] != self.tree_file_id(t))
        for id_set in [self._new_name, self._new_parent, new_file_id,
                       self._new_executability]:
            changed_ids.update(id_set)
        # removing implies a kind change
        changed_kind = set(self._removed_contents)
        changed_kind.intersection_update(self._new_contents)
        # Ignore entries that are already known to have changed.
        changed_kind.difference_update(changed_ids)
        #  to keep only the truly changed ones
        changed_kind = (t for t in changed_kind
                        if self.tree_kind(t) != self.final_kind(t))
        # all kind changes will alter the inventory
        changed_ids.update(changed_kind)
        # To find entries with changed parent_ids, find parents which existed,
        # but changed file_id.
        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
        # Now add all their children to the set.
        for parent_trans_id in new_file_id:
            changed_ids.update(self.iter_tree_children(parent_trans_id))
        return sorted(FinalPaths(self).get_paths(changed_ids))
    def final_kind(self, trans_id):
        """Determine the final file kind, after any changes applied.

        :return: None if the file does not exist/has no contents.  (It is
            conceivable that a path would be created without the corresponding
            contents insertion command)
        """
        if trans_id in self._new_contents:
            return self._new_contents[trans_id]
        elif trans_id in self._removed_contents:
            return None
        else:
            return self.tree_kind(trans_id)
    def tree_path(self, trans_id):
        """Determine the tree path associated with the trans_id."""
        return self._tree_id_paths.get(trans_id)

    def tree_file_id(self, trans_id):
        """Determine the file id associated with the trans_id in the tree"""
        path = self.tree_path(trans_id)
        if path is None:
            # the file is a new, unversioned file, or invalid trans_id
            return None
        # the file is old; the old id is still valid
        if self._new_root == trans_id:
            return self._tree.get_root_id()
        return self._tree.path2id(path)
            # ensure that all children are registered with the transaction
            list(self.iter_tree_children(parent_id))
    def _has_named_child(self, name, parent_id, known_children):
        """Does a parent already have a name child.

        :param name: The searched for name.

        :param parent_id: The parent for which the check is made.

        :param known_children: The already known children. This should have
            been recently obtained from `self.by_parent.get(parent_id)`
            (or will be if None is passed).
        """
        if known_children is None:
            known_children = self.by_parent().get(parent_id, [])
        for child in known_children:
            if self.final_name(child) == name:
                return True
        parent_path = self._tree_id_paths.get(parent_id, None)
        if parent_path is None:
            # No parent... no children
            return False
        child_path = joinpath(parent_path, name)
        child_id = self._tree_path_ids.get(child_path, None)
        if child_id is None:
            # Not known by the tree transform yet, check the filesystem
            return osutils.lexists(self._tree.abspath(child_path))
        else:
            raise AssertionError('child_id is missing: %s, %s, %s'
                                 % (name, parent_id, child_id))
    def _available_backup_name(self, name, target_id):
        """Find an available backup name.

        :param name: The basename of the file.

        :param target_id: The directory trans_id where the backup should
            be placed.
        """
        known_children = self.by_parent().get(target_id, [])
        return osutils.available_backup_name(
            name,
            lambda base: self._has_named_child(
                base, target_id, known_children))
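    # Illustrative sketch (not part of the original source): given the
    # osutils.available_backup_name() helper used above and the "name.~N~"
    # backup pattern used elsewhere in this module, a lookup could behave
    # roughly like this; `tt` is assumed to be a transform and `parent_id`
    # an existing directory trans-id.
    #
    #   >>> tt._available_backup_name('foo', parent_id)
    #   'foo.~1~'            # if 'foo.~1~' is free in that directory
    #   >>> tt._available_backup_name('foo', parent_id)
    #   'foo.~2~'            # once 'foo.~1~' is already taken
    #
    # The callback passed to osutils.available_backup_name() is what keeps
    # the candidate names consistent with both the tree contents and the
    # pending transform entries, via _has_named_child().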
    def _parent_loops(self):
        """No entry should be its own ancestor"""
                                               self._removed_id))
        all_ids = self._tree.all_file_ids()
        active_tree_ids = all_ids.difference(removed_tree_ids)
        for trans_id, file_id in viewitems(self._new_id):
            if file_id in active_tree_ids:
                path = self._tree.id2path(file_id)
                old_trans_id = self.trans_id_tree_path(path)
                conflicts.append(('duplicate id', old_trans_id, trans_id))
    def _parent_type_conflicts(self, by_parent):
        """Children must have a directory parent"""
        conflicts = []
        for parent_id, children in viewitems(by_parent):
            if parent_id == ROOT_PARENT:
                continue
            has_children = False
            for child_id in children:
                if self.final_kind(child_id) is not None:
                    has_children = True
                    break
            if not has_children:
                continue
            # There is at least a child, so we need an existing directory to
            # contain it.
            kind = self.final_kind(parent_id)
            if kind is None:
                # The directory will be deleted
                conflicts.append(('missing parent', parent_id))
            elif kind != "directory":
                # Meh, we need a *directory* to put something in it
                conflicts.append(('non-directory parent', parent_id))
        return conflicts
    def _set_executability(self, path, trans_id):
        """Set the executability of versioned files """
        if self._tree._supports_executable():
            new_executability = self._new_executability[trans_id]
            abspath = self._tree.abspath(path)
            current_mode = os.stat(abspath).st_mode
            if new_executability:
                umask = os.umask(0)
                os.umask(umask)
                to_mode = current_mode | (0o100 & ~umask)
                # Enable x-bit for others only if they can read it.
                if current_mode & 0o004:
                    to_mode |= 0o001 & ~umask
                if current_mode & 0o040:
                    to_mode |= 0o010 & ~umask
            else:
                to_mode = current_mode & ~0o111
            osutils.chmod_if_possible(abspath, to_mode)
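    # Worked example (illustrative, not from the original source): with a
    # typical umask of 0o022 and an existing file mode of 0o644, turning the
    # executable bit on yields
    #     to_mode = 0o644 | (0o100 & ~0o022)        # owner x-bit -> 0o744
    #     to_mode |= 0o001 & ~0o022                 # others can read -> 0o745
    #     to_mode |= 0o010 & ~0o022                 # group can read -> 0o755
    # while turning it off simply clears every x-bit: 0o755 & ~0o111 == 0o644.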
    def _new_entry(self, name, parent_id, file_id):
        """Helper function to create a new filesystem entry."""

        self.create_symlink(target, trans_id)
    def new_orphan(self, trans_id, parent_id):
        """Schedule an item to be orphaned.

        When a directory is about to be removed, its children, if they are not
        versioned are moved out of the way: they don't have a parent anymore.

        :param trans_id: The trans_id of the existing item.
        :param parent_id: The parent trans_id of the item.
        """
        raise NotImplementedError(self.new_orphan)

    def _get_potential_orphans(self, dir_id):
        """Find the potential orphans in a directory.

        A directory can't be safely deleted if there are versioned files in it.
        If all the contained files are unversioned then they can be orphaned.

        The 'None' return value means that the directory contains at least one
        versioned file and should not be deleted.

        :param dir_id: The directory trans id.

        :return: A list of the orphan trans ids or None if at least one
            versioned file is present.
        """
        orphans = []
        # Find the potential orphans, stop if one item should be kept
        for child_tid in self.by_parent()[dir_id]:
            if child_tid in self._removed_contents:
                # The child is removed as part of the transform. Since it was
                # versioned before, it's not an orphan
                continue
            elif self.final_file_id(child_tid) is None:
                # The child is not versioned
                orphans.append(child_tid)
            else:
                # We have a versioned file here, searching for orphans is
                # meaningless.
                orphans = None
                break
        return orphans
720
def _affected_ids(self):
871
721
"""Return the set of transform ids affected by the transform"""
872
722
trans_ids = set(self._removed_id)
873
trans_ids.update(self._new_id)
723
trans_ids.update(self._new_id.keys())
874
724
trans_ids.update(self._removed_contents)
875
trans_ids.update(self._new_contents)
876
trans_ids.update(self._new_executability)
877
trans_ids.update(self._new_name)
878
trans_ids.update(self._new_parent)
725
trans_ids.update(self._new_contents.keys())
726
trans_ids.update(self._new_executability.keys())
727
trans_ids.update(self._new_name.keys())
728
trans_ids.update(self._new_parent.keys())
881
731
def _get_file_id_maps(self):
        return _PreviewTree(self)

    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date.  (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id.  (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
        self._check_malformed()
        :param serializer: A Serialiser like pack.ContainerSerializer.
        """
        new_name = dict((k, v.encode('utf-8')) for k, v in
                        viewitems(self._new_name))
        new_executability = dict((k, int(v)) for k, v in
                                 viewitems(self._new_executability))
        tree_path_ids = dict((k.encode('utf-8'), v)
                             for k, v in viewitems(self._tree_path_ids))
        attribs = {
            b'_id_number': self._id_number,
            b'_new_name': new_name,
            b'_new_parent': self._new_parent,
            b'_new_executability': new_executability,
            b'_new_id': self._new_id,
            b'_tree_path_ids': tree_path_ids,
            b'_removed_id': list(self._removed_id),
            b'_removed_contents': list(self._removed_contents),
            b'_non_present_ids': self._non_present_ids,
            }
        yield serializer.bytes_record(bencode.bencode(attribs),
                                      ((b'attribs',),))
        for trans_id, kind in viewitems(self._new_contents):
            if kind == 'file':
                with open(self._limbo_name(trans_id), 'rb') as cur_file:
                    lines = cur_file.readlines()
                parents = self._get_parents_lines(trans_id)
                mpdiff = multiparent.MultiParent.from_lines(lines, parents)
                content = b''.join(mpdiff.to_patch())
            if kind == 'directory':
                content = b''
            if kind == 'symlink':
                content = self._read_symlink_target(trans_id)
            yield serializer.bytes_record(content, ((trans_id, kind),))
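    # Illustrative sketch (not part of the original source): the 'attribs'
    # record above is just a bencoded dict with byte-string keys, so its
    # encoding can be exercised independently of the pack container:
    #
    #   >>> from breezy import bencode
    #   >>> data = bencode.bencode({b'_id_number': 3, b'_removed_id': []})
    #   >>> bencode.bdecode(data) == {b'_id_number': 3, b'_removed_id': []}
    #   True
    #
    # deserialize() below relies on exactly this symmetry, plus one
    # bytes_record per new file/symlink/directory for the content payloads.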
        :param records: An iterable of (names, content) tuples, as per
            pack.ContainerPushParser.
        """
        names, content = next(records)
        attribs = bencode.bdecode(content)
        self._id_number = attribs[b'_id_number']
        self._new_name = dict((k, v.decode('utf-8'))
                              for k, v in viewitems(attribs[b'_new_name']))
        self._new_parent = attribs[b'_new_parent']
        self._new_executability = dict((k, bool(v))
            for k, v in viewitems(attribs[b'_new_executability']))
        self._new_id = attribs[b'_new_id']
        self._r_new_id = dict((v, k) for k, v in viewitems(self._new_id))
        self._tree_path_ids = {}
        self._tree_id_paths = {}
        for bytepath, trans_id in viewitems(attribs[b'_tree_path_ids']):
            path = bytepath.decode('utf-8')
            self._tree_path_ids[path] = trans_id
            self._tree_id_paths[trans_id] = path
        self._removed_id = set(attribs[b'_removed_id'])
        self._removed_contents = set(attribs[b'_removed_contents'])
        self._non_present_ids = attribs[b'_non_present_ids']
        for ((trans_id, kind),), content in records:
            if kind == 'file':
                mpdiff = multiparent.MultiParent.from_patch(content)
            TreeTransformBase.finalize(self)

    def _limbo_supports_executable(self):
        """Check if the limbo path supports the executable bit."""
        # FIXME: Check actual file system capabilities of limbodir
        return osutils.supports_executable()
    def _limbo_name(self, trans_id):
        """Generate the limbo name of a file"""
        limbo_name = self._limbo_files.get(trans_id)
        if limbo_name is None:
            limbo_name = self._generate_limbo_path(trans_id)
            self._limbo_files[trans_id] = limbo_name
        return limbo_name

    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the trans_id as the relative path.

        This is suitable as a fallback, and when the transform should not be
        sensitive to the path encoding of the limbo directory.
        """
        self._needs_rename.add(trans_id)
        return pathjoin(self._limbodir, trans_id)
    def adjust_path(self, name, parent, trans_id):
        previous_parent = self._new_parent.get(trans_id)
        previous_name = self._new_name.get(trans_id)

            descendants.update(self._limbo_descendants(descendant))
        return descendants
    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
        """Schedule creation of a new file.

        :param contents: an iterator of strings, all of which will be written
            to the target destination.
        :param trans_id: TreeTransform handle
        :param mode_id: If not None, force the mode of the target file to match
            the mode of the object referenced by mode_id.
            Otherwise, we will try to preserve mode bits of an existing file.
        :param sha1: If the sha1 of this content is already known, pass it in.
            We can use it to prevent future sha1 computations.
        """
        name = self._limbo_name(trans_id)
        with open(name, 'wb') as f:
            unique_add(self._new_contents, trans_id, 'file')
            f.writelines(contents)
        self._set_mtime(name)
        self._set_mode(trans_id, mode_id, S_ISREG)
        # It is unfortunate we have to use lstat instead of fstat, but we just
        # used utime and chmod on the file, so we need the accurate final
        # info.
        if sha1 is not None:
            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
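    # Illustrative usage sketch (not part of the original source): assuming
    # `tt` is a transform over some working tree and `parent_id` is the
    # trans-id of a versioned directory, scheduling a new file looks like:
    #
    #   >>> trans_id = tt.create_path('example.txt', parent_id)
    #   >>> tt.create_file([b'hello\n'], trans_id)
    #   >>> tt.version_file(b'example-id', trans_id)   # file id is made up
    #   >>> tt.apply()
    #
    # Passing sha1= is purely an optimisation hint; when it is supplied the
    # observed (sha1, stat) pair is cached so the tree does not need to
    # re-hash the file after apply().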
    def _read_file_chunks(self, trans_id):
        with open(self._limbo_name(trans_id), 'rb') as cur_file:
            return cur_file.readlines()

    def _read_symlink_target(self, trans_id):
        return os.readlink(self._limbo_name(trans_id))
    def _set_mtime(self, path):
        """All files that are created get the same mtime.

        This time is set by the first object to be created.
        """
        if self._creation_mtime is None:
            self._creation_mtime = time.time()
        os.utime(path, (self._creation_mtime, self._creation_mtime))
    def create_hardlink(self, path, trans_id):
        """Schedule creation of a hard link"""
        name = self._limbo_name(trans_id)
        try:
            os.link(path, name)
        except OSError as e:
            if e.errno != errno.EPERM:
                raise
            raise errors.HardLinkNotSupported(path)
            del self._limbo_children_names[trans_id]
        delete_any(self._limbo_name(trans_id))

    def new_orphan(self, trans_id, parent_id):
        conf = self._tree.get_config_stack()
        handle_orphan = conf.get('transform.orphan_policy')
        handle_orphan(self, trans_id, parent_id)
class OrphaningError(errors.BzrError):

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent


class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `brz-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'brz-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))


def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')


orphaning_registry = registry.Registry()
orphaning_registry.register(
    u'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    u'move', move_orphan,
    'Move orphans into the brz-orphans directory.')
orphaning_registry._set_default_key(u'conflict')


opt_transform_orphan = _mod_config.RegistryOption(
    'transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    )
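# Illustrative sketch (not part of the original source): orphaning_registry
# and the 'transform.orphan_policy' option registered above are the only
# extension points needed to supply an extra policy.  The callback name
# below is made up for the example; the registration call mirrors the ones
# used for 'move' and 'conflict'.
#
#   def _log_and_move_orphan(tt, orphan_id, parent_id):
#       """Hypothetical policy: log the orphan, then fall back to move_orphan."""
#       trace.warning('orphaning %s' % tt.final_name(orphan_id))
#       move_orphan(tt, orphan_id, parent_id)
#
#   orphaning_registry.register(
#       u'log-and-move', _log_and_move_orphan,
#       'Log orphans, then move them into the brz-orphans directory.')
#
# A policy is then selected per tree/branch through the
# 'transform.orphan_policy' configuration option.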
class TreeTransform(DiskTreeTransform):
    """Represent a tree transformation."""

                yield self.trans_id_tree_path(childpath)
    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the final path if possible.

        This optimizes the performance of applying the tree transform by
        avoiding renames.  These renames can be avoided only when the parent
        directory is already scheduled for creation.

        If the final path cannot be used, falls back to using the trans_id as
        the relpath.
        """
        parent = self._new_parent.get(trans_id)
        # if the parent directory is already in limbo (e.g. when building a
        # tree), choose a limbo name inside the parent, to reduce further
        # renames.
        use_direct_path = False
        if self._new_contents.get(parent) == 'directory':
            filename = self._new_name.get(trans_id)
            if filename is not None:
                if parent not in self._limbo_children:
                    self._limbo_children[parent] = set()
                    self._limbo_children_names[parent] = {}
                    use_direct_path = True
                # the direct path can only be used if no other file has
                # already taken this pathname, i.e. if the name is unused, or
                # if it is already associated with this trans_id.
                elif self._case_sensitive_target:
                    if (self._limbo_children_names[parent].get(filename)
                        in (trans_id, None)):
                        use_direct_path = True
                else:
                    for l_filename, l_trans_id in viewitems(
                            self._limbo_children_names[parent]):
                        if l_trans_id == trans_id:
                            continue
                        if l_filename.lower() == filename.lower():
                            break
                    else:
                        use_direct_path = True

        if not use_direct_path:
            return DiskTreeTransform._generate_limbo_path(self, trans_id)

        limbo_name = pathjoin(self._limbo_files[parent], filename)
        self._limbo_children[parent].add(trans_id)
        self._limbo_children_names[parent][filename] = trans_id
        return limbo_name
    def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
        """Apply all changes to the inventory and filesystem.
        If inventory_delta is None, no inventory delta generation is performed.
        """
        tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
        with ui.ui_factory.nested_progress_bar() as child_pb:
            for num, (path, trans_id) in enumerate(tree_paths):
                # do not attempt to move root into a subdirectory of itself.
                if path == '':
                    continue
                child_pb.update(gettext('removing file'), num, len(tree_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._removed_contents:
                    delete_path = os.path.join(self._deletiondir, trans_id)
                    mover.pre_delete(full_path, delete_path)
                elif (trans_id in self._new_name
                      or trans_id in self._new_parent):
                    try:
                        mover.rename(full_path, self._limbo_name(trans_id))
                    except errors.TransformRenameFailed as e:
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
    def _apply_insertions(self, mover):
        """Perform tree operations that insert directory/inventory names.
        """
        modified_paths = []
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                 new_paths)
        with ui.ui_factory.nested_progress_bar() as child_pb:
            for num, (path, trans_id) in enumerate(new_paths):
                if (num % 10) == 0:
                    child_pb.update(gettext('adding file'), num, len(new_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._needs_rename:
                    try:
                        mover.rename(self._limbo_name(trans_id), full_path)
                    except errors.TransformRenameFailed as e:
                        # We may be renaming a dangling inventory id
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
                    # TODO: if trans_id in self._observed_sha1s, we should
                    #       re-stat the final target, since ctime will be
                    #       updated by the change.
                if (trans_id in self._new_contents or
                    self.path_changed(trans_id)):
                    if trans_id in self._new_contents:
                        modified_paths.append(full_path)
                if trans_id in self._new_executability:
                    self._set_executability(path, trans_id)
                if trans_id in self._observed_sha1s:
                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
                    st = osutils.lstat(full_path)
                    self._observed_sha1s[trans_id] = (o_sha1, st)
        for path, trans_id in new_paths:
            # new_paths includes stuff like workingtree conflicts. Only the
            # stuff in new_contents actually comes from limbo.
            if trans_id in self._limbo_files:
                del self._limbo_files[trans_id]
        self._new_contents.clear()
        return modified_paths
    def _apply_observed_sha1s(self):
        """After we have finished renaming everything, update observed sha1s

        This has to be done after self._tree.apply_inventory_delta, otherwise
        it doesn't know anything about the files we are updating. Also, we want
        to do this as late as possible, so that most entries end up cached.
        """
        # TODO: this doesn't update the stat information for directories. So
        #       the first 'bzr status' will still need to rewrite
        #       .bzr/checkout/dirstate. However, we at least don't need to
        #       re-read all of the files.
        # TODO: If the operation took a while, we could do a time.sleep(3) here
        #       to allow the clock to tick over and ensure we won't have any
        #       problems. (we could observe start time, and finish time, and if
        #       it is less than eg 10% overhead, add a sleep call.)
        paths = FinalPaths(self)
        for trans_id, observed in viewitems(self._observed_sha1s):
            path = paths.get_path(trans_id)
            # We could get the file_id, but dirstate prefers to use the path
            # anyway, and it is 'cheaper' to determine.
            # file_id = self._new_id[trans_id]
            self._tree._observed_sha1(None, path, observed)
class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees."""

        except errors.NoSuchRevisionInTree:
            yield self._get_repository().revision_tree(revision_id)
    def _get_file_revision(self, path, file_id, vf, tree_revision):
        parent_keys = [
            (file_id, t.get_file_revision(t.id2path(file_id), file_id))
            for t in self._iter_parent_trees()]
        vf.add_lines((file_id, tree_revision), parent_keys,
                     self.get_file_lines(path, file_id))
        repo = self._get_repository()
        base_vf = repo.texts
        if base_vf not in vf.fallback_versionedfiles:
            vf.fallback_versionedfiles.append(base_vf)
        return tree_revision

    def _stat_limbo_file(self, trans_id):
        name = self._transform._limbo_name(trans_id)
        return os.lstat(name)
        ordered_ids = self._list_files_by_dir()
        for entry, trans_id in self._make_inv_entries(ordered_ids,
                specific_file_ids, yield_parents=yield_parents):
            yield self._final_paths.get_path(trans_id), entry
    def _iter_entries_for_dir(self, dir_path):
        """Return path, entry for items in a directory without recursing down."""
        ordered_ids = []
        dir_trans_id = self._path2trans_id(dir_path)
        dir_id = self._transform.final_file_id(dir_trans_id)
        for child_trans_id in self._all_children(dir_trans_id):
            ordered_ids.append((child_trans_id, dir_id))
        path_entries = []
        for entry, trans_id in self._make_inv_entries(ordered_ids):
            path_entries.append((self._final_paths.get_path(trans_id), entry))
        path_entries.sort()
        return path_entries
    def list_files(self, include_root=False, from_dir=None, recursive=True):
        """See WorkingTree.list_files."""

            for path, entry in entries:
                yield path, 'V', entry.kind, entry.file_id, entry
    def kind(self, path, file_id=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        return self._transform.final_kind(trans_id)

    def stored_kind(self, path, file_id=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        try:
            return self._transform._new_contents[trans_id]
        except KeyError:
            return self._transform._tree.stored_kind(path, file_id)
    def get_file_mtime(self, path, file_id=None):
        """See Tree.get_file_mtime"""
        file_id = self.path2id(path)
        if file_id is None:
            raise errors.NoSuchFile(path)
        if not self._content_change(file_id):
            return self._transform._tree.get_file_mtime(
                self._transform._tree.id2path(file_id), file_id)
        trans_id = self._path2trans_id(path)
        return self._stat_limbo_file(trans_id).st_mtime
    def _file_size(self, entry, stat_value):
        return self.get_file_size(entry.file_id)

    def get_file_size(self, path, file_id=None):
        """See Tree.get_file_size"""
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        kind = self._transform.final_kind(trans_id)
        if kind != 'file':
            return None
        if trans_id in self._transform._new_contents:
            return self._stat_limbo_file(trans_id).st_size
        if self.kind(path, file_id) == 'file':
            return self._transform._tree.get_file_size(path, file_id)
    def get_file_verifier(self, path, file_id=None, stat_value=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        kind = self._transform._new_contents.get(trans_id)
        if kind is None:
            return self._transform._tree.get_file_verifier(path, file_id)
        if kind == 'file':
            with self.get_file(path, file_id) as fileobj:
                return ("SHA1", sha_file(fileobj))

    def get_file_sha1(self, path, file_id=None, stat_value=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        kind = self._transform._new_contents.get(trans_id)
        if kind is None:
            return self._transform._tree.get_file_sha1(path, file_id)
        if kind == 'file':
            with self.get_file(path, file_id) as fileobj:
                return sha_file(fileobj)
    def is_executable(self, path, file_id=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            return False
        try:
            return self._transform._new_executability[trans_id]
        except KeyError:
            try:
                return self._transform._tree.is_executable(path, file_id)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    return False
                raise
            except errors.NoSuchFile:
                return False
    def has_filename(self, path):
        trans_id = self._path2trans_id(path)
        if trans_id in self._transform._new_contents:
            return True
        elif trans_id in self._transform._removed_contents:
            return False
        else:
            return self._transform._tree.has_filename(path)
    def path_content_summary(self, path):
        trans_id = self._path2trans_id(path)
        tt = self._transform

            raise ValueError('want_unversioned is not supported')
        return self._transform.iter_changes()
    def get_file(self, path, file_id=None):
        """See Tree.get_file"""
        file_id = self.path2id(path)
        if not self._content_change(file_id):
            return self._transform._tree.get_file(path, file_id)
        trans_id = self._path2trans_id(path)
        name = self._transform._limbo_name(trans_id)
        return open(name, 'rb')

    def get_file_with_stat(self, path, file_id=None):
        return self.get_file(path, file_id), None

    def annotate_iter(self, path, file_id=None,
                      default_revision=_mod_revision.CURRENT_REVISION):
        file_id = self.path2id(path)
        changes = self._iter_changes_cache.get(file_id)
        if changes is None:
    :param delta_from_tree: If true, build_tree may use the input Tree to
        generate the inventory delta.
    """
    with wt.lock_tree_write(), tree.lock_read():
        if accelerator_tree is not None:
            accelerator_tree.lock_read()
        try:
            return _build_tree(tree, wt, accelerator_tree, hardlink,
                               delta_from_tree)
        finally:
            if accelerator_tree is not None:
                accelerator_tree.unlock()
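# Illustrative usage sketch (not part of the original source): build_tree()
# is typically handed a revision tree and a freshly created, still-empty
# working tree; `source_tree`, `wt` and `local_tree` below are assumed to
# come from the caller (e.g. a checkout operation).
#
#   build_tree(source_tree, wt)
#   # or, reusing an existing local tree to avoid re-reading file texts:
#   build_tree(source_tree, wt, accelerator_tree=local_tree, hardlink=False)
#
# The accelerator tree is purely an optimisation: contents are copied (or
# hardlinked) from it whenever iter_changes() says they match source_tree.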
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
    """See build_tree."""
    for num, _unused in enumerate(wt.all_versioned_paths()):
        if num > 0:  # more than just a root
            raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
    existing_files = set()
    for dir, files in wt.walkdirs():
        existing_files.update(f[0] for f in files)
    file_trans_id = {}
    top_pb = ui.ui_factory.nested_progress_bar()
    pp = ProgressPhase("Build phase", 2, top_pb)
    if tree.get_root_id() is not None:
        # This is kind of a hack: we should be altering the root
        # as part of the regular tree shape diff logic.
        # The conditional test here is to avoid doing an
    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(p[0], p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(tp, ap) for (tp, ap) in unchanged
                         if not next(accelerator_tree.iter_search_rules([ap]))]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for unused_tree_path, (trans_id, file_id, tree_path, text_sha1) in desired_files:
            accelerator_path = unchanged.get(tree_path)
            if accelerator_path is None:
                new_desired_files.append((tree_path,
                    (trans_id, file_id, tree_path, text_sha1)))
                continue
            pb.update(gettext('Adding file contents'), count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)
            else:
                with accelerator_tree.get_file(accelerator_path, file_id) as f:
                    chunks = osutils.file_iterator(f)
                    if wt.supports_content_filtering():
                        filters = wt._content_filter_stack(tree_path)
                        chunks = filtered_output_bytes(chunks, filters,
                            ContentFilterContext(tree_path, tree))
                    tt.create_file(chunks, trans_id, sha1=text_sha1)
            count += 1
        offset += count
    for count, ((trans_id, file_id, tree_path, text_sha1), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id, sha1=text_sha1)
        pb.update(gettext('Adding file contents'), count + offset, total)
def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]
def _content_match(tree, entry, tree_path, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        with open(target_path, 'rb') as f1, \
             tree.get_file(tree_path, file_id) as f2:
            if osutils.compare_files(f1, f2):
                return True
    elif entry.kind == "symlink":
        if tree.get_symlink_target(tree_path, file_id) == os.readlink(target_path):
            return True
    return False


    return new_conflicts
def new_by_entry(path, tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry"""
    name = entry.name
    kind = entry.kind
    if kind == 'file':
        with tree.get_file(path, entry.file_id) as f:
            executable = tree.is_executable(path, entry.file_id)
            return tt.new_file(
                name, parent_id, osutils.file_iterator(f), entry.file_id,
                executable)
    elif kind in ('directory', 'tree-reference'):
        trans_id = tt.new_directory(name, parent_id, entry.file_id)
        if kind == 'tree-reference':
            tt.set_tree_reference(entry.reference_revision, trans_id)
        return trans_id
    elif kind == 'symlink':
        target = tree.get_symlink_target(path, entry.file_id)
        return tt.new_symlink(name, parent_id, target, entry.file_id)
    else:
        raise errors.BadFileKindError(name, kind)
@deprecated_function(deprecated_in((1, 9, 0)))
def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
    """Create new file contents according to an inventory entry.

    DEPRECATED. Use create_from_tree instead.
    """
    if entry.kind == "file":
        if lines is None:
            lines = tree.get_file(entry.file_id).readlines()
        tt.create_file(lines, trans_id, mode_id=mode_id)
    elif entry.kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
    elif entry.kind == "directory":
        tt.create_directory(trans_id)


def create_from_tree(tt, trans_id, tree, path, file_id=None, chunks=None,
                     filter_tree_path=None):
    """Create new file contents according to tree contents.

    :param filter_tree_path: the tree path to use to lookup
      content filters to apply to the bytes output in the working tree.
      This only applies if the working tree supports content filtering.
    """
    kind = tree.kind(path, file_id)
    if kind == 'directory':
        tt.create_directory(trans_id)
    elif kind == "file":
        if chunks is None:
            f = tree.get_file(path, file_id)
            chunks = osutils.file_iterator(f)
        wt = tt._tree
        if wt.supports_content_filtering() and filter_tree_path is not None:
            filters = wt._content_filter_stack(filter_tree_path)
            chunks = filtered_output_bytes(chunks, filters,
                ContentFilterContext(filter_tree_path, tree))
        tt.create_file(chunks, trans_id)
    elif kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(path, file_id), trans_id)
    else:
        raise AssertionError('Unknown kind %r' % kind)
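# Illustrative usage sketch (not part of the original source): given an
# existing transform `tt`, a source `tree` and a path that exists in it,
# create_from_tree() fills in whatever kind of content the tree reports;
# the target name below is made up for the example.
#
#   trans_id = tt.create_path('copied.txt', tt.root)
#   create_from_tree(tt, trans_id, tree, 'docs/copied.txt')
#
# For files the bytes are streamed through the working tree's content
# filters when filter_tree_path is supplied; directories and symlinks are
# recreated directly.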
        tt.set_executability(entry.executable, trans_id)


def get_backup_name(entry, by_parent, parent_trans_id, tt):
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)


def _get_backup_name(name, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        counter = 1
        while True:
            yield "%s.~%d~" % (name, counter)
            counter += 1
    for new_name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, new_name):
            return new_name
def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except NoSuchFile:
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod
def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    pb = ui.ui_factory.nested_progress_bar()
    with target_tree.lock_read(), TreeTransform(working_tree, pb) as tt:
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, backups, pp)
        if change_reporter:
            change_reporter = delta._ChangeReporter(
                unversioned_filter=working_tree.is_ignored)
            delta.report_changes(tt.iter_changes(), change_reporter)
        for conflict in conflicts:
            trace.warning(text_type(conflict))
        pp.next_phase()
        tt.apply()
        if working_tree.supports_merge_modified():
            working_tree.set_merge_modified(merge_modified)
    return conflicts
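# Illustrative usage sketch (not part of the original source): reverting a
# whole working tree back to its basis tree boils down to
#
#   conflicts = revert(wt, wt.basis_tree(), None)
#
# where `wt` is assumed to be a WorkingTree supplied by the caller; passing
# a list of filenames instead of None restricts the revert to those paths,
# and backups=True renames clobbered files to `name.~N~` backups instead of
# discarding the local changes.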
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    basis_path = find_previous_path(working_tree, basis_tree, wt_path)
                    if basis_path is None:
                        if target_kind is None and not target_versioned:
                            keep_content = True
                    else:
                        if wt_sha1 != basis_tree.get_file_sha1(basis_path, file_id):
                            keep_content = True
                if wt_kind is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif target_kind is not None:
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
                        backup_name = tt._available_backup_name(
                            wt_name, parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
                        if wt_versioned and target_versioned:
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # file.
                        mode_id = trans_id
                        trans_id = new_trans_id
                if target_kind in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if target_kind == 'tree-reference':
                        revision = target_tree.get_reference_revision(
                            target_path, file_id)
                        tt.set_tree_reference(revision, trans_id)
                elif target_kind == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(
                        target_path, file_id), trans_id)
                elif target_kind == 'file':
                    deferred_files.append((target_path, (trans_id, mode_id, file_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(target_path, file_id)
                    basis_path = find_previous_path(target_tree, basis_tree, target_path)
                    if (basis_path is not None and
                        new_sha1 == basis_tree.get_file_sha1(basis_path, file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1

                    # preserve the execute bit when backing up
                    if keep_content and wt_executable == target_executable:
                        tt.set_executability(target_executable, trans_id)
                elif target_kind is not None:
                    raise AssertionError(target_kind)
                if not wt_versioned and target_versioned:
                    tt.version_file(file_id, trans_id)
                if wt_versioned and not target_versioned:
                    tt.unversion_file(trans_id)
                if (target_name is not None and
                    (wt_name != target_name or wt_parent != target_parent)):
                    if target_name == '' and target_parent is None:
                        parent_trans = ROOT_PARENT
                    else:
                        parent_trans = tt.trans_id_file_id(target_parent)
                    if wt_parent is None and wt_versioned:
                        tt.adjust_root_path(target_name, parent_trans)
                    else:
                        tt.adjust_path(target_name, parent_trans, trans_id)
                if wt_executable != target_executable and target_kind == "file":
                    tt.set_executability(target_executable, trans_id)
            if working_tree.supports_content_filtering():
                for (trans_id, mode_id, file_id), bytes in (
                        target_tree.iter_files_bytes(deferred_files)):
                    # We're reverting a tree to the target tree so using the
                    # target tree to find the file path seems the best choice
                    # here IMO - Ian C 27/Oct/2009
                    filter_tree_path = target_tree.id2path(file_id)
                    filters = working_tree._content_filter_stack(filter_tree_path)
                    bytes = filtered_output_bytes(bytes, filters,
                        ContentFilterContext(filter_tree_path, working_tree))
                    tt.create_file(bytes, trans_id, mode_id)
            else:
                for (trans_id, mode_id, file_id), bytes in target_tree.iter_files_bytes(
                        deferred_files):
                    tt.create_file(bytes, trans_id, mode_id)
            tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified
def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    with ui.ui_factory.nested_progress_bar() as pb:
        for n in range(10):
            pb.update(gettext('Resolution pass'), n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
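# Illustrative sketch (not part of the original source): callers normally
# run resolve_conflicts() between building up a transform and applying it,
# keeping whatever could only be resolved by renaming or re-parenting:
#
#   tt = TreeTransform(wt)
#   try:
#       ...  # schedule creations / deletions / moves on tt
#       remaining = resolve_conflicts(tt)
#       tt.apply()
#   finally:
#       tt.finalize()
#
# After ten passes anything still conflicting raises MalformedTransform,
# which is why the resolution loop above is bounded rather than open-ended.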
def conflict_pass(tt, conflicts, path_tree=None):