    def new_contents(self, trans_id):
        return (trans_id in self._new_contents)

    def find_conflicts(self):
        """Find any violations of inventory or filesystem invariants"""
        if self._done is True:
            raise ReusingTransform()
        conflicts = []
        # ensure all children of all existent parents are known
        # all children of non-existent parents are known, by definition.
        self._add_tree_children()
        by_parent = self.by_parent()
        conflicts.extend(self._unversioned_parents(by_parent))
        conflicts.extend(self._parent_loops())
        conflicts.extend(self._duplicate_entries(by_parent))
        conflicts.extend(self._duplicate_ids())
        conflicts.extend(self._parent_type_conflicts(by_parent))
        conflicts.extend(self._improper_versioning())
        conflicts.extend(self._executability_conflicts())
        conflicts.extend(self._overwrite_conflicts())
        return conflicts

    def _check_malformed(self):
        conflicts = self.find_conflicts()
        if len(conflicts) != 0:
            raise MalformedTransform(conflicts=conflicts)
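    # find_conflicts() returns a list of tuples; the first element names the
    # conflict type and the rest are trans_ids (plus, for some types, a name
    # or kind), e.g. ('missing parent', parent_trans_id) or
    # ('duplicate', trans_id1, trans_id2, name).  A minimal inspection sketch,
    # assuming `tt` is an active transform:
    #
    #     for conflict in tt.find_conflicts():
    #         print 'conflict type %s: %r' % (conflict[0], conflict[1:])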
    def _add_tree_children(self):
        """Add all the children of all active parents to the known paths.

        Active parents are those which gain children, and those which are
        removed.  This is a necessary first step in detecting conflicts.
        """
        parents = self.by_parent().keys()
        parents.extend([t for t in self._removed_contents if
                        self.tree_kind(t) == 'directory'])
        for trans_id in self._removed_id:
            file_id = self.tree_file_id(trans_id)
            if file_id is not None:
                if self._tree.inventory[file_id].kind == 'directory':
                    parents.append(trans_id)
            elif self.tree_kind(trans_id) == 'directory':
                parents.append(trans_id)

        for parent_id in parents:
            # ensure that all children are registered with the transaction
            list(self.iter_tree_children(parent_id))
    def has_named_child(self, by_parent, parent_id, name):
        try:
            children = by_parent[parent_id]
        except KeyError:
            children = []
        for child in children:
            if self.final_name(child) == name:
                return True
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return False
        childpath = joinpath(path, name)
        child_id = self._tree_path_ids.get(childpath)
        if child_id is None:
            return lexists(self._tree.abspath(childpath))
        else:
            if self.final_parent(child_id) != parent_id:
                return False
            if child_id in self._removed_contents:
                # XXX What about dangling file-ids?
                return False
            return True
    def _parent_loops(self):
        """No entry should be its own ancestor"""
        conflicts = []
        for trans_id in self._new_parent:
            seen = set()
            parent_id = trans_id
            while parent_id is not ROOT_PARENT:
                seen.add(parent_id)
                try:
                    parent_id = self.final_parent(parent_id)
                except KeyError:
                    break
                if parent_id == trans_id:
                    conflicts.append(('parent loop', trans_id))
                if parent_id in seen:
                    break
        return conflicts
    def _unversioned_parents(self, by_parent):
        """If parent directories are versioned, children must be versioned."""
        conflicts = []
        for parent_id, children in by_parent.iteritems():
            if parent_id is ROOT_PARENT:
                continue
            if self.final_file_id(parent_id) is not None:
                continue
            for child_id in children:
                if self.final_file_id(child_id) is not None:
                    conflicts.append(('unversioned parent', parent_id))
                    break
        return conflicts
    def _improper_versioning(self):
        """Cannot version a file with no contents, or a bad type.

        However, existing entries with no contents are okay.
        """
        conflicts = []
        for trans_id in self._new_id.iterkeys():
            kind = self.final_kind(trans_id)
            if kind is None:
                conflicts.append(('versioning no contents', trans_id))
                continue
            if not InventoryEntry.versionable_kind(kind):
                conflicts.append(('versioning bad kind', trans_id, kind))
        return conflicts
    def _executability_conflicts(self):
        """Check for bad executability changes.

        Only versioned files may have their executability set, because
        1. only versioned entries can have executability under windows
        2. only files can be executable.  (The execute bit on a directory
           does not indicate searchability)
        """
        conflicts = []
        for trans_id in self._new_executability:
            if self.final_file_id(trans_id) is None:
                conflicts.append(('unversioned executability', trans_id))
            else:
                non_file = self.final_kind(trans_id) != "file"
                if non_file:
                    conflicts.append(('non-file executability', trans_id))
        return conflicts
    def _overwrite_conflicts(self):
        """Check for overwrites (not permitted on Win32)"""
        conflicts = []
        for trans_id in self._new_contents:
            try:
                self.tree_kind(trans_id)
            except NoSuchFile:
                continue
            if trans_id not in self._removed_contents:
                conflicts.append(('overwrite', trans_id,
                                  self.final_name(trans_id)))
        return conflicts
    def _duplicate_entries(self, by_parent):
        """No directory may have two entries with the same name."""
        conflicts = []
        if (self._new_name, self._new_parent) == ({}, {}):
            return conflicts
        for children in by_parent.itervalues():
            name_ids = [(self.final_name(t), t) for t in children]
            if not self._case_sensitive_target:
                name_ids = [(n.lower(), t) for n, t in name_ids]
            name_ids.sort()
            last_name = None
            last_trans_id = None
            for name, trans_id in name_ids:
                kind = self.final_kind(trans_id)
                file_id = self.final_file_id(trans_id)
                if kind is None and file_id is None:
                    continue
                if name == last_name:
                    conflicts.append(('duplicate', last_trans_id, trans_id,
                                      name))
                last_name = name
                last_trans_id = trans_id
        return conflicts
    def _duplicate_ids(self):
        """Each inventory id may only be used once"""
        conflicts = []
        removed_tree_ids = set((self.tree_file_id(trans_id) for trans_id in
                                self._removed_id))
        all_ids = self._tree.all_file_ids()
        active_tree_ids = all_ids.difference(removed_tree_ids)
        for trans_id, file_id in self._new_id.iteritems():
            if file_id in active_tree_ids:
                old_trans_id = self.trans_id_tree_file_id(file_id)
                conflicts.append(('duplicate id', old_trans_id, trans_id))
        return conflicts
    def _parent_type_conflicts(self, by_parent):
        """parents must have directory 'contents'."""
        conflicts = []
        for parent_id, children in by_parent.iteritems():
            if parent_id is ROOT_PARENT:
                continue
            if not self._any_contents(children):
                continue
            for child in children:
                self.final_kind(child)
            kind = self.final_kind(parent_id)
            if kind is None:
                conflicts.append(('missing parent', parent_id))
            elif kind != "directory":
                conflicts.append(('non-directory parent', parent_id))
        return conflicts

    def _any_contents(self, trans_ids):
        """Return true if any of the trans_ids, will have contents."""
        for trans_id in trans_ids:
            kind = self.final_kind(trans_id)
            if kind is None:
                continue
            return True
        return False
    def _set_executability(self, path, trans_id):
        """Set the executability of versioned files """
        if supports_executable():
            new_executability = self._new_executability[trans_id]
            abspath = self._tree.abspath(path)
            current_mode = os.stat(abspath).st_mode
            if new_executability:
                umask = os.umask(0)
                os.umask(umask)
                to_mode = current_mode | (0100 & ~umask)
                # Enable x-bit for others only if they can read it.
                if current_mode & 0004:
                    to_mode |= 0001 & ~umask
                if current_mode & 0040:
                    to_mode |= 0010 & ~umask
            else:
                to_mode = current_mode & ~0111
            os.chmod(abspath, to_mode)
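    # Worked example of the mode arithmetic above: with a current mode of 0644
    # and a umask of 022, setting executability gives
    #
    #     to_mode = 0644 | (0100 & ~022)   # -> 0744, owner execute
    #     to_mode |= 0001 & ~022           # -> 0745, others can read (0004)
    #     to_mode |= 0010 & ~022           # -> 0755, group can read (0040)
    #
    # while clearing executability masks off all of 0111, giving 0644 again.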
    def _new_entry(self, name, parent_id, file_id):
        """Helper function to create a new filesystem entry."""
        trans_id = self.create_path(name, parent_id)
        if file_id is not None:
            self.version_file(file_id, trans_id)
        return trans_id

    def new_file(self, name, parent_id, contents, file_id=None,
                 executable=None, sha1=None):
        """Convenience method to create files.

        name is the name of the file to create.
        """
        trans_id = self._new_entry(name, parent_id, file_id)
        self.create_file(contents, trans_id, sha1=sha1)
        if executable is not None:
            self.set_executability(executable, trans_id)
        return trans_id

    def get_preview_tree(self):
        """Return a tree representing the result of the transform.

        The tree is a snapshot, and altering the TreeTransform will invalidate
        it.
        """
        return _PreviewTree(self)
    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date.  (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id.  (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
        self._check_malformed()
        if strict:
            unversioned = set(self._new_contents).difference(set(self._new_id))
            for trans_id in unversioned:
                if self.final_file_id(trans_id) is None:
                    raise errors.StrictCommitFailed()

        revno, last_rev_id = branch.last_revision_info()
        if last_rev_id == _mod_revision.NULL_REVISION:
            if merge_parents is not None:
                raise ValueError('Cannot supply merge parents for first'
                                 ' commit.')
            parent_ids = []
        else:
            parent_ids = [last_rev_id]
            if merge_parents is not None:
                parent_ids.extend(merge_parents)
        if self._tree.get_revision_id() != last_rev_id:
            raise ValueError('TreeTransform not based on branch basis: %s' %
                             self._tree.get_revision_id())
        builder = branch.get_commit_builder(parent_ids)
        preview = self.get_preview_tree()
        list(builder.record_iter_changes(preview, last_rev_id,
                                         self.iter_changes()))
        builder.finish_inventory()
        revision_id = builder.commit(message)
        branch.set_last_revision_info(revno + 1, revision_id)
        return revision_id
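    # Minimal usage sketch for commit(), assuming the transform was built on
    # the branch's current basis tree via TransformPreview (defined below);
    # the file name, contents and id are placeholders:
    #
    #     tt = TransformPreview(branch.basis_tree())
    #     try:
    #         tt.new_file('NEWS', tt.root, ['initial text\n'], 'news-id')
    #         new_revid = tt.commit(branch, 'add NEWS')
    #     finally:
    #         tt.finalize()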
    def _text_parent(self, trans_id):
        file_id = self.tree_file_id(trans_id)
        try:
            if file_id is None or self._tree.kind(file_id) != 'file':
                return None
        except errors.NoSuchFile:
            return None
        return file_id

    def _get_parents_texts(self, trans_id):
        """Get texts for compression parents of this file."""
        file_id = self._text_parent(trans_id)
        if file_id is None:
            return ()
        return (self._tree.get_file_text(file_id),)

    def _get_parents_lines(self, trans_id):
        """Get lines for compression parents of this file."""
        file_id = self._text_parent(trans_id)
        if file_id is None:
            return ()
        return (self._tree.get_file_lines(file_id),)
    def serialize(self, serializer):
        """Serialize this TreeTransform.

        :param serializer: A Serialiser like pack.ContainerSerializer.
        """
        new_name = dict((k, v.encode('utf-8')) for k, v in
                        self._new_name.items())
        new_executability = dict((k, int(v)) for k, v in
                                 self._new_executability.items())
        tree_path_ids = dict((k.encode('utf-8'), v)
                             for k, v in self._tree_path_ids.items())
        attribs = {
            '_id_number': self._id_number,
            '_new_name': new_name,
            '_new_parent': self._new_parent,
            '_new_executability': new_executability,
            '_new_id': self._new_id,
            '_tree_path_ids': tree_path_ids,
            '_removed_id': list(self._removed_id),
            '_removed_contents': list(self._removed_contents),
            '_non_present_ids': self._non_present_ids,
            }
        yield serializer.bytes_record(bencode.bencode(attribs),
                                      (('attribs',),))
        for trans_id, kind in self._new_contents.items():
            if kind == 'file':
                lines = osutils.chunks_to_lines(
                    self._read_file_chunks(trans_id))
                parents = self._get_parents_lines(trans_id)
                mpdiff = multiparent.MultiParent.from_lines(lines, parents)
                content = ''.join(mpdiff.to_patch())
            if kind == 'directory':
                content = ''
            if kind == 'symlink':
                content = self._read_symlink_target(trans_id)
            yield serializer.bytes_record(content, ((trans_id, kind),))
    def deserialize(self, records):
        """Deserialize a stored TreeTransform.

        :param records: An iterable of (names, content) tuples, as per
            pack.ContainerPushParser.
        """
        names, content = records.next()
        attribs = bencode.bdecode(content)
        self._id_number = attribs['_id_number']
        self._new_name = dict((k, v.decode('utf-8'))
                              for k, v in attribs['_new_name'].items())
        self._new_parent = attribs['_new_parent']
        self._new_executability = dict((k, bool(v)) for k, v in
                                       attribs['_new_executability'].items())
        self._new_id = attribs['_new_id']
        self._r_new_id = dict((v, k) for k, v in self._new_id.items())
        self._tree_path_ids = {}
        self._tree_id_paths = {}
        for bytepath, trans_id in attribs['_tree_path_ids'].items():
            path = bytepath.decode('utf-8')
            self._tree_path_ids[path] = trans_id
            self._tree_id_paths[trans_id] = path
        self._removed_id = set(attribs['_removed_id'])
        self._removed_contents = set(attribs['_removed_contents'])
        self._non_present_ids = attribs['_non_present_ids']
        for ((trans_id, kind),), content in records:
            if kind == 'file':
                mpdiff = multiparent.MultiParent.from_patch(content)
                lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
                self.create_file(lines, trans_id)
            if kind == 'directory':
                self.create_directory(trans_id)
            if kind == 'symlink':
                self.create_symlink(content.decode('utf-8'), trans_id)
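    # Round-trip sketch for serialize()/deserialize(), assuming the pack-style
    # serialiser and push parser named in the docstrings above (the class
    # names and begin()/end() framing are assumptions; `tt` and `new_tt` are
    # transforms over equivalent tree state):
    #
    #     ser = pack.ContainerSerialiser()
    #     data = ser.begin() + ''.join(tt.serialize(ser)) + ser.end()
    #     parser = pack.ContainerPushParser()
    #     parser.accept_bytes(data)
    #     new_tt.deserialize(iter(parser.read_pending_records()))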
class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
        self._limbodir = limbodir
        self._deletiondir = None
        # A mapping of transform ids to their limbo filename
        self._limbo_files = {}
        # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
        self._limbo_children = {}
        # Map transform ids to maps of child filename to child transform id
        self._limbo_children_names = {}
        # List of transform ids that need to be renamed from limbo into place
        self._needs_rename = set()
        self._creation_mtime = None
1092
"""Release the working tree lock, if held, clean up limbo dir.
1094
This is required if apply has not been invoked, but can be invoked
1097
if self._tree is None:
1100
entries = [(self._limbo_name(t), t, k) for t, k in
1101
self._new_contents.iteritems()]
1102
entries.sort(reverse=True)
1103
for path, trans_id, kind in entries:
1106
delete_any(self._limbodir)
1108
# We don't especially care *why* the dir is immortal.
1109
raise ImmortalLimbo(self._limbodir)
1111
if self._deletiondir is not None:
1112
delete_any(self._deletiondir)
1114
raise errors.ImmortalPendingDeletion(self._deletiondir)
1116
TreeTransformBase.finalize(self)
    def _limbo_name(self, trans_id):
        """Generate the limbo name of a file"""
        limbo_name = self._limbo_files.get(trans_id)
        if limbo_name is None:
            limbo_name = self._generate_limbo_path(trans_id)
            self._limbo_files[trans_id] = limbo_name
        return limbo_name

    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the trans_id as the relative path.

        This is suitable as a fallback, and when the transform should not be
        sensitive to the path encoding of the limbo directory.
        """
        self._needs_rename.add(trans_id)
        return pathjoin(self._limbodir, trans_id)
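    # For example, a file whose trans_id is 'new-1' is created as
    # <limbodir>/new-1 under this fallback and renamed into place by apply();
    # the TreeTransform override further down instead tries to create it
    # directly inside its (limbo) parent directory to avoid that extra rename.
    # (The trans_id shown is illustrative.)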
1135
def adjust_path(self, name, parent, trans_id):
1136
previous_parent = self._new_parent.get(trans_id)
1137
previous_name = self._new_name.get(trans_id)
1138
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1139
if (trans_id in self._limbo_files and
1140
trans_id not in self._needs_rename):
1141
self._rename_in_limbo([trans_id])
1142
if previous_parent != parent:
1143
self._limbo_children[previous_parent].remove(trans_id)
1144
if previous_parent != parent or previous_name != name:
1145
del self._limbo_children_names[previous_parent][previous_name]
1147
def _rename_in_limbo(self, trans_ids):
1148
"""Fix limbo names so that the right final path is produced.
1150
This means we outsmarted ourselves-- we tried to avoid renaming
1151
these files later by creating them with their final names in their
1152
final parents. But now the previous name or parent is no longer
1153
suitable, so we have to rename them.
1155
Even for trans_ids that have no new contents, we must remove their
1156
entries from _limbo_files, because they are now stale.
1158
for trans_id in trans_ids:
1159
old_path = self._limbo_files.pop(trans_id)
1160
if trans_id not in self._new_contents:
1162
new_path = self._limbo_name(trans_id)
1163
osutils.rename(old_path, new_path)
1164
for descendant in self._limbo_descendants(trans_id):
1165
desc_path = self._limbo_files[descendant]
1166
desc_path = new_path + desc_path[len(old_path):]
1167
self._limbo_files[descendant] = desc_path
1169
def _limbo_descendants(self, trans_id):
1170
"""Return the set of trans_ids whose limbo paths descend from this."""
1171
descendants = set(self._limbo_children.get(trans_id, []))
1172
for descendant in list(descendants):
1173
descendants.update(self._limbo_descendants(descendant))
1176
def create_file(self, contents, trans_id, mode_id=None):
484
raise NotImplementedError(self.commit)
486
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1177
487
"""Schedule creation of a new file.
1181
Contents is an iterator of strings, all of which will be written
1182
to the target destination.
1184
New file takes the permissions of any existing file with that id,
1185
unless mode_id is specified.
1187
name = self._limbo_name(trans_id)
1188
f = open(name, 'wb')
1191
unique_add(self._new_contents, trans_id, 'file')
1193
# Clean up the file, it never got registered so
1194
# TreeTransform.finalize() won't clean it up.
1199
f.writelines(contents)
1202
self._set_mtime(name)
1203
self._set_mode(trans_id, mode_id, S_ISREG)
1205
def _read_file_chunks(self, trans_id):
1206
cur_file = open(self._limbo_name(trans_id), 'rb')
1208
return cur_file.readlines()
1212
def _read_symlink_target(self, trans_id):
1213
return os.readlink(self._limbo_name(trans_id))
1215
def _set_mtime(self, path):
1216
"""All files that are created get the same mtime.
1218
This time is set by the first object to be created.
1220
if self._creation_mtime is None:
1221
self._creation_mtime = time.time()
1222
os.utime(path, (self._creation_mtime, self._creation_mtime))
1224
def create_hardlink(self, path, trans_id):
1225
"""Schedule creation of a hard link"""
1226
name = self._limbo_name(trans_id)
1230
if e.errno != errno.EPERM:
1232
raise errors.HardLinkNotSupported(path)
1234
unique_add(self._new_contents, trans_id, 'file')
1236
# Clean up the file, it never got registered so
1237
# TreeTransform.finalize() won't clean it up.
491
:param contents: an iterator of strings, all of which will be written
492
to the target destination.
493
:param trans_id: TreeTransform handle
494
:param mode_id: If not None, force the mode of the target file to match
495
the mode of the object referenced by mode_id.
496
Otherwise, we will try to preserve mode bits of an existing file.
497
:param sha1: If the sha1 of this content is already known, pass it in.
498
We can use it to prevent future sha1 computations.
500
raise NotImplementedError(self.create_file)
1241
502
def create_directory(self, trans_id):
1242
503
"""Schedule creation of a new directory.
1244
505
See also new_directory.
1246
os.mkdir(self._limbo_name(trans_id))
1247
unique_add(self._new_contents, trans_id, 'directory')
507
raise NotImplementedError(self.create_directory)
1249
509
def create_symlink(self, target, trans_id):
1250
510
"""Schedule creation of a new symbolic link.
1252
512
target is a bytestring.
1253
513
See also new_symlink.
1256
os.symlink(target, self._limbo_name(trans_id))
1257
unique_add(self._new_contents, trans_id, 'symlink')
1260
path = FinalPaths(self).get_path(trans_id)
1263
raise UnableCreateSymlink(path=path)
515
raise NotImplementedError(self.create_symlink)
517
def create_hardlink(self, path, trans_id):
518
"""Schedule creation of a hard link"""
519
raise NotImplementedError(self.create_hardlink)
1265
521
def cancel_creation(self, trans_id):
1266
522
"""Cancel the creation of new file contents."""
1267
del self._new_contents[trans_id]
1268
children = self._limbo_children.get(trans_id)
1269
# if this is a limbo directory with children, move them before removing
1271
if children is not None:
1272
self._rename_in_limbo(children)
1273
del self._limbo_children[trans_id]
1274
del self._limbo_children_names[trans_id]
1275
delete_any(self._limbo_name(trans_id))
1278
class TreeTransform(DiskTreeTransform):
1279
"""Represent a tree transformation.
1281
This object is designed to support incremental generation of the transform,
1284
However, it gives optimum performance when parent directories are created
1285
before their contents. The transform is then able to put child files
1286
directly in their parent directory, avoiding later renames.
1288
It is easy to produce malformed transforms, but they are generally
1289
harmless. Attempting to apply a malformed transform will cause an
1290
exception to be raised before any modifications are made to the tree.
1292
Many kinds of malformed transforms can be corrected with the
1293
resolve_conflicts function. The remaining ones indicate programming error,
1294
such as trying to create a file with no path.
1296
Two sets of file creation methods are supplied. Convenience methods are:
1301
These are composed of the low-level methods:
1303
* create_file or create_directory or create_symlink
1307
Transform/Transaction ids
1308
-------------------------
1309
trans_ids are temporary ids assigned to all files involved in a transform.
1310
It's possible, even common, that not all files in the Tree have trans_ids.
1312
trans_ids are used because filenames and file_ids are not good enough
1313
identifiers; filenames change, and not all files have file_ids. File-ids
1314
are also associated with trans-ids, so that moving a file moves its
1317
trans_ids are only valid for the TreeTransform that generated them.
1321
Limbo is a temporary directory use to hold new versions of files.
1322
Files are added to limbo by create_file, create_directory, create_symlink,
1323
and their convenience variants (new_*). Files may be removed from limbo
1324
using cancel_creation. Files are renamed from limbo into their final
1325
location as part of TreeTransform.apply
1327
Limbo must be cleaned up, by either calling TreeTransform.apply or
1328
calling TreeTransform.finalize.
1330
Files are placed into limbo inside their parent directories, where
1331
possible. This reduces subsequent renames, and makes operations involving
1332
lots of files faster. This optimization is only possible if the parent
1333
directory is created *before* creating any of its children, so avoid
1334
creating children before parents, where possible.
1338
This temporary directory is used by _FileMover for storing files that are
1339
about to be deleted. In case of rollback, the files will be restored.
1340
FileMover does not delete files until it is sure that a rollback will not
    def __init__(self, tree, pb=None):
        """Note: a tree_write lock is taken on the tree.

        Use TreeTransform.finalize() to release the lock (can be omitted if
        TreeTransform.apply() called).
        """
        tree.lock_tree_write()

        try:
            limbodir = urlutils.local_path_from_url(
                tree._transport.abspath('limbo'))
            try:
                os.mkdir(limbodir)
            except OSError, e:
                if e.errno == errno.EEXIST:
                    raise ExistingLimbo(limbodir)
            deletiondir = urlutils.local_path_from_url(
                tree._transport.abspath('pending-deletion'))
            try:
                os.mkdir(deletiondir)
            except OSError, e:
                if e.errno == errno.EEXIST:
                    raise errors.ExistingPendingDeletion(deletiondir)
        except:
            tree.unlock()
            raise

        # Cache of realpath results, to speed up canonical_path
        self._realpaths = {}
        # Cache of relpath results, to speed up canonical_path
        self._relpaths = {}
        DiskTreeTransform.__init__(self, tree, limbodir, pb,
                                   tree.case_sensitive)
        self._deletiondir = deletiondir
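    # A minimal end-to-end sketch, assuming `wt` is an unlocked WorkingTree;
    # the file name, contents and file id are placeholders:
    #
    #     tt = TreeTransform(wt)
    #     try:
    #         tt.new_file('hello.txt', tt.root, ['hello world\n'], 'hello-id')
    #         tt.apply()
    #     finally:
    #         tt.finalize()   # a no-op if apply() already cleaned up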
1378
def canonical_path(self, path):
1379
"""Get the canonical tree-relative path"""
1380
# don't follow final symlinks
1381
abs = self._tree.abspath(path)
1382
if abs in self._relpaths:
1383
return self._relpaths[abs]
1384
dirname, basename = os.path.split(abs)
1385
if dirname not in self._realpaths:
1386
self._realpaths[dirname] = os.path.realpath(dirname)
1387
dirname = self._realpaths[dirname]
1388
abs = pathjoin(dirname, basename)
1389
if dirname in self._relpaths:
1390
relpath = pathjoin(self._relpaths[dirname], basename)
1391
relpath = relpath.rstrip('/\\')
1393
relpath = self._tree.relpath(abs)
1394
self._relpaths[abs] = relpath
1397
def tree_kind(self, trans_id):
1398
"""Determine the file kind in the working tree.
1400
Raises NoSuchFile if the file does not exist
1402
path = self._tree_id_paths.get(trans_id)
1404
raise NoSuchFile(None)
1406
return file_kind(self._tree.abspath(path))
1408
if e.errno != errno.ENOENT:
1411
raise NoSuchFile(path)
1413
def _set_mode(self, trans_id, mode_id, typefunc):
1414
"""Set the mode of new file contents.
1415
The mode_id is the existing file to get the mode from (often the same
1416
as trans_id). The operation is only performed if there's a mode match
1417
according to typefunc.
1422
old_path = self._tree_id_paths[mode_id]
1426
mode = os.stat(self._tree.abspath(old_path)).st_mode
1428
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1429
# Either old_path doesn't exist, or the parent of the
1430
# target is not a directory (but will be one eventually)
1431
# Either way, we know it doesn't exist *right now*
1432
# See also bug #248448
1437
os.chmod(self._limbo_name(trans_id), mode)
1439
def iter_tree_children(self, parent_id):
1440
"""Iterate through the entry's tree children, if any"""
1442
path = self._tree_id_paths[parent_id]
1446
children = os.listdir(self._tree.abspath(path))
1448
if not (osutils._is_error_enotdir(e)
1449
or e.errno in (errno.ENOENT, errno.ESRCH)):
1453
for child in children:
1454
childpath = joinpath(path, child)
1455
if self._tree.is_control_filename(childpath):
1457
yield self.trans_id_tree_path(childpath)
1459
def _generate_limbo_path(self, trans_id):
1460
"""Generate a limbo path using the final path if possible.
1462
This optimizes the performance of applying the tree transform by
1463
avoiding renames. These renames can be avoided only when the parent
1464
directory is already scheduled for creation.
1466
If the final path cannot be used, falls back to using the trans_id as
1469
parent = self._new_parent.get(trans_id)
1470
# if the parent directory is already in limbo (e.g. when building a
1471
# tree), choose a limbo name inside the parent, to reduce further
1473
use_direct_path = False
1474
if self._new_contents.get(parent) == 'directory':
1475
filename = self._new_name.get(trans_id)
1476
if filename is not None:
1477
if parent not in self._limbo_children:
1478
self._limbo_children[parent] = set()
1479
self._limbo_children_names[parent] = {}
1480
use_direct_path = True
1481
# the direct path can only be used if no other file has
1482
# already taken this pathname, i.e. if the name is unused, or
1483
# if it is already associated with this trans_id.
1484
elif self._case_sensitive_target:
1485
if (self._limbo_children_names[parent].get(filename)
1486
in (trans_id, None)):
1487
use_direct_path = True
1489
for l_filename, l_trans_id in\
1490
self._limbo_children_names[parent].iteritems():
1491
if l_trans_id == trans_id:
1493
if l_filename.lower() == filename.lower():
1496
use_direct_path = True
1498
if not use_direct_path:
1499
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1501
limbo_name = pathjoin(self._limbo_files[parent], filename)
1502
self._limbo_children[parent].add(trans_id)
1503
self._limbo_children_names[parent][filename] = trans_id
1507
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1508
"""Apply all changes to the inventory and filesystem.
1510
If filesystem or inventory conflicts are present, MalformedTransform
1513
If apply succeeds, finalize is not necessary.
1515
:param no_conflicts: if True, the caller guarantees there are no
1516
conflicts, so no check is made.
1517
:param precomputed_delta: An inventory delta to use instead of
1519
:param _mover: Supply an alternate FileMover, for testing
1521
if not no_conflicts:
1522
self._check_malformed()
1523
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1525
if precomputed_delta is None:
1526
child_pb.update('Apply phase', 0, 2)
1527
inventory_delta = self._generate_inventory_delta()
1530
inventory_delta = precomputed_delta
1533
mover = _FileMover()
1537
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1538
self._apply_removals(mover)
1539
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1540
modified_paths = self._apply_insertions(mover)
1545
mover.apply_deletions()
1548
self._tree.apply_inventory_delta(inventory_delta)
1551
return _TransformResults(modified_paths, self.rename_count)
1553
def _generate_inventory_delta(self):
1554
"""Generate an inventory delta for the current transform."""
1555
inventory_delta = []
1556
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1557
new_paths = self._inventory_altered()
1558
total_entries = len(new_paths) + len(self._removed_id)
1560
for num, trans_id in enumerate(self._removed_id):
1562
child_pb.update('removing file', num, total_entries)
1563
if trans_id == self._new_root:
1564
file_id = self._tree.get_root_id()
1566
file_id = self.tree_file_id(trans_id)
1567
# File-id isn't really being deleted, just moved
1568
if file_id in self._r_new_id:
1570
path = self._tree_id_paths[trans_id]
1571
inventory_delta.append((path, None, file_id, None))
1572
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1574
entries = self._tree.iter_entries_by_dir(
1575
new_path_file_ids.values())
1576
old_paths = dict((e.file_id, p) for p, e in entries)
1578
for num, (path, trans_id) in enumerate(new_paths):
1580
child_pb.update('adding file',
1581
num + len(self._removed_id), total_entries)
1582
file_id = new_path_file_ids[trans_id]
1587
kind = self.final_kind(trans_id)
1589
kind = self._tree.stored_kind(file_id)
1590
parent_trans_id = self.final_parent(trans_id)
1591
parent_file_id = new_path_file_ids.get(parent_trans_id)
1592
if parent_file_id is None:
1593
parent_file_id = self.final_file_id(parent_trans_id)
1594
if trans_id in self._new_reference_revision:
1595
new_entry = inventory.TreeReference(
1597
self._new_name[trans_id],
1598
self.final_file_id(self._new_parent[trans_id]),
1599
None, self._new_reference_revision[trans_id])
1601
new_entry = inventory.make_entry(kind,
1602
self.final_name(trans_id),
1603
parent_file_id, file_id)
1604
old_path = old_paths.get(new_entry.file_id)
1605
new_executability = self._new_executability.get(trans_id)
1606
if new_executability is not None:
1607
new_entry.executable = new_executability
1608
inventory_delta.append(
1609
(old_path, path, new_entry.file_id, new_entry))
return inventory_delta
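    # Each entry in the delta returned above is a 4-tuple
    # (old_path, new_path, file_id, entry): removals use
    # (path, None, file_id, None), while adds and renames carry the new
    # InventoryEntry.  For example, renaming file id 'a-id' from 'a' to 'b'
    # yields ('a', 'b', 'a-id', <InventoryEntry for 'b'>).  (Illustrative ids.)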
1614
def _apply_removals(self, mover):
1615
"""Perform tree operations that remove directory/inventory names.
1617
That is, delete files that are to be deleted, and put any files that
1618
need renaming into limbo. This must be done in strict child-to-parent
1621
If inventory_delta is None, no inventory delta generation is performed.
1623
tree_paths = list(self._tree_path_ids.iteritems())
1624
tree_paths.sort(reverse=True)
1625
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1627
for num, data in enumerate(tree_paths):
1628
path, trans_id = data
1629
child_pb.update('removing file', num, len(tree_paths))
1630
full_path = self._tree.abspath(path)
1631
if trans_id in self._removed_contents:
1632
delete_path = os.path.join(self._deletiondir, trans_id)
1633
mover.pre_delete(full_path, delete_path)
1634
elif (trans_id in self._new_name
1635
or trans_id in self._new_parent):
1637
mover.rename(full_path, self._limbo_name(trans_id))
1639
if e.errno != errno.ENOENT:
1642
self.rename_count += 1
1646
def _apply_insertions(self, mover):
1647
"""Perform tree operations that insert directory/inventory names.
1649
That is, create any files that need to be created, and restore from
1650
limbo any files that needed renaming. This must be done in strict
1651
parent-to-child order.
1653
If inventory_delta is None, no inventory delta is calculated, and
1654
no list of modified paths is returned.
1656
new_paths = self.new_paths(filesystem_only=True)
1658
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1660
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1662
for num, (path, trans_id) in enumerate(new_paths):
1664
child_pb.update('adding file', num, len(new_paths))
1665
full_path = self._tree.abspath(path)
1666
if trans_id in self._needs_rename:
1668
mover.rename(self._limbo_name(trans_id), full_path)
1670
# We may be renaming a dangling inventory id
1671
if e.errno != errno.ENOENT:
1674
self.rename_count += 1
1675
if (trans_id in self._new_contents or
1676
self.path_changed(trans_id)):
1677
if trans_id in self._new_contents:
1678
modified_paths.append(full_path)
1679
if trans_id in self._new_executability:
1680
self._set_executability(path, trans_id)
1683
self._new_contents.clear()
1684
return modified_paths
1687
class TransformPreview(DiskTreeTransform):
1688
"""A TreeTransform for generating preview trees.
1690
Unlike TreeTransform, this version works when the input tree is a
1691
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1692
unversioned files in the input tree.
1695
def __init__(self, tree, pb=None, case_sensitive=True):
1697
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1698
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1700
def canonical_path(self, path):
1703
def tree_kind(self, trans_id):
1704
path = self._tree_id_paths.get(trans_id)
1706
raise NoSuchFile(None)
1707
file_id = self._tree.path2id(path)
1708
return self._tree.kind(file_id)
1710
def _set_mode(self, trans_id, mode_id, typefunc):
1711
"""Set the mode of new file contents.
1712
The mode_id is the existing file to get the mode from (often the same
1713
as trans_id). The operation is only performed if there's a mode match
1714
according to typefunc.
1716
# is it ok to ignore this? probably
1719
def iter_tree_children(self, parent_id):
1720
"""Iterate through the entry's tree children, if any"""
1722
path = self._tree_id_paths[parent_id]
1725
file_id = self.tree_file_id(parent_id)
1728
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1729
children = getattr(entry, 'children', {})
1730
for child in children:
1731
childpath = joinpath(path, child)
1732
yield self.trans_id_tree_path(childpath)
class _PreviewTree(tree.Tree):
    """Partial implementation of Tree to support show_diff_trees"""

    def __init__(self, transform):
        self._transform = transform
        self._final_paths = FinalPaths(transform)
        self.__by_parent = None
        self._parent_ids = []
        self._all_children_cache = {}
        self._path2trans_id_cache = {}
        self._final_name_cache = {}
        self._iter_changes_cache = dict((c[0], c) for c in
                                        self._transform.iter_changes())
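    # Typical use (illustrative): obtain an instance via
    # TreeTransformBase.get_preview_tree() and compare it against the tree the
    # transform was built on, e.g.
    #
    #     preview = tt.get_preview_tree()
    #     for change in preview.iter_changes(tt._tree):
    #         pass   # (file_id, (old_path, new_path), changed_content, ...)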
1749
def _content_change(self, file_id):
1750
"""Return True if the content of this file changed"""
1751
changes = self._iter_changes_cache.get(file_id)
1752
# changes[2] is true if the file content changed. See
1753
# InterTree.iter_changes.
1754
return (changes is not None and changes[2])
1756
def _get_repository(self):
1757
repo = getattr(self._transform._tree, '_repository', None)
1759
repo = self._transform._tree.branch.repository
1762
def _iter_parent_trees(self):
1763
for revision_id in self.get_parent_ids():
1765
yield self.revision_tree(revision_id)
1766
except errors.NoSuchRevisionInTree:
1767
yield self._get_repository().revision_tree(revision_id)
1769
def _get_file_revision(self, file_id, vf, tree_revision):
1770
parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
1771
self._iter_parent_trees()]
1772
vf.add_lines((file_id, tree_revision), parent_keys,
1773
self.get_file(file_id).readlines())
1774
repo = self._get_repository()
1775
base_vf = repo.texts
1776
if base_vf not in vf.fallback_versionedfiles:
1777
vf.fallback_versionedfiles.append(base_vf)
1778
return tree_revision
1780
def _stat_limbo_file(self, file_id):
1781
trans_id = self._transform.trans_id_file_id(file_id)
1782
name = self._transform._limbo_name(trans_id)
1783
return os.lstat(name)
1786
def _by_parent(self):
1787
if self.__by_parent is None:
1788
self.__by_parent = self._transform.by_parent()
1789
return self.__by_parent
1791
def _comparison_data(self, entry, path):
1792
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
1793
if kind == 'missing':
1797
file_id = self._transform.final_file_id(self._path2trans_id(path))
1798
executable = self.is_executable(file_id, path)
1799
return kind, executable, None
1801
def lock_read(self):
1802
# Perhaps in theory, this should lock the TreeTransform?
1809
def inventory(self):
1810
"""This Tree does not use inventory as its backing data."""
1811
raise NotImplementedError(_PreviewTree.inventory)
1813
def get_root_id(self):
1814
return self._transform.final_file_id(self._transform.root)
1816
def all_file_ids(self):
1817
tree_ids = set(self._transform._tree.all_file_ids())
1818
tree_ids.difference_update(self._transform.tree_file_id(t)
1819
for t in self._transform._removed_id)
1820
tree_ids.update(self._transform._new_id.values())
1824
return iter(self.all_file_ids())
1826
def _has_id(self, file_id, fallback_check):
1827
if file_id in self._transform._r_new_id:
1829
elif file_id in set([self._transform.tree_file_id(trans_id) for
1830
trans_id in self._transform._removed_id]):
1833
return fallback_check(file_id)
1835
def has_id(self, file_id):
1836
return self._has_id(file_id, self._transform._tree.has_id)
1838
def has_or_had_id(self, file_id):
1839
return self._has_id(file_id, self._transform._tree.has_or_had_id)
1841
def _path2trans_id(self, path):
1842
# We must not use None here, because that is a valid value to store.
1843
trans_id = self._path2trans_id_cache.get(path, object)
1844
if trans_id is not object:
1846
segments = splitpath(path)
1847
cur_parent = self._transform.root
1848
for cur_segment in segments:
1849
for child in self._all_children(cur_parent):
1850
final_name = self._final_name_cache.get(child)
1851
if final_name is None:
1852
final_name = self._transform.final_name(child)
1853
self._final_name_cache[child] = final_name
1854
if final_name == cur_segment:
1858
self._path2trans_id_cache[path] = None
1860
self._path2trans_id_cache[path] = cur_parent
1863
def path2id(self, path):
1864
return self._transform.final_file_id(self._path2trans_id(path))
1866
def id2path(self, file_id):
1867
trans_id = self._transform.trans_id_file_id(file_id)
1869
return self._final_paths._determine_path(trans_id)
1871
raise errors.NoSuchId(self, file_id)
1873
def _all_children(self, trans_id):
1874
children = self._all_children_cache.get(trans_id)
1875
if children is not None:
1877
children = set(self._transform.iter_tree_children(trans_id))
1878
# children in the _new_parent set are provided by _by_parent.
1879
children.difference_update(self._transform._new_parent.keys())
1880
children.update(self._by_parent.get(trans_id, []))
1881
self._all_children_cache[trans_id] = children
1884
def iter_children(self, file_id):
1885
trans_id = self._transform.trans_id_file_id(file_id)
1886
for child_trans_id in self._all_children(trans_id):
1887
yield self._transform.final_file_id(child_trans_id)
1890
possible_extras = set(self._transform.trans_id_tree_path(p) for p
1891
in self._transform._tree.extras())
1892
possible_extras.update(self._transform._new_contents)
1893
possible_extras.update(self._transform._removed_id)
1894
for trans_id in possible_extras:
1895
if self._transform.final_file_id(trans_id) is None:
1896
yield self._final_paths._determine_path(trans_id)
1898
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
1899
yield_parents=False):
1900
for trans_id, parent_file_id in ordered_entries:
1901
file_id = self._transform.final_file_id(trans_id)
1904
if (specific_file_ids is not None
1905
and file_id not in specific_file_ids):
1908
kind = self._transform.final_kind(trans_id)
1910
kind = self._transform._tree.stored_kind(file_id)
1911
new_entry = inventory.make_entry(
1913
self._transform.final_name(trans_id),
1914
parent_file_id, file_id)
1915
yield new_entry, trans_id
1917
def _list_files_by_dir(self):
1918
todo = [ROOT_PARENT]
1920
while len(todo) > 0:
1922
parent_file_id = self._transform.final_file_id(parent)
1923
children = list(self._all_children(parent))
1924
paths = dict(zip(children, self._final_paths.get_paths(children)))
1925
children.sort(key=paths.get)
1926
todo.extend(reversed(children))
1927
for trans_id in children:
1928
ordered_ids.append((trans_id, parent_file_id))
1931
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
1932
# This may not be a maximally efficient implementation, but it is
1933
# reasonably straightforward. An implementation that grafts the
1934
# TreeTransform changes onto the tree's iter_entries_by_dir results
1935
# might be more efficient, but requires tricky inferences about stack
1937
ordered_ids = self._list_files_by_dir()
1938
for entry, trans_id in self._make_inv_entries(ordered_ids,
1939
specific_file_ids, yield_parents=yield_parents):
1940
yield unicode(self._final_paths.get_path(trans_id)), entry
1942
def _iter_entries_for_dir(self, dir_path):
1943
"""Return path, entry for items in a directory without recursing down."""
1944
dir_file_id = self.path2id(dir_path)
1946
for file_id in self.iter_children(dir_file_id):
1947
trans_id = self._transform.trans_id_file_id(file_id)
1948
ordered_ids.append((trans_id, file_id))
1949
for entry, trans_id in self._make_inv_entries(ordered_ids):
1950
yield unicode(self._final_paths.get_path(trans_id)), entry
1952
def list_files(self, include_root=False, from_dir=None, recursive=True):
1953
"""See WorkingTree.list_files."""
1954
# XXX This should behave like WorkingTree.list_files, but is really
1955
# more like RevisionTree.list_files.
1959
prefix = from_dir + '/'
1960
entries = self.iter_entries_by_dir()
1961
for path, entry in entries:
1962
if entry.name == '' and not include_root:
1965
if not path.startswith(prefix):
1967
path = path[len(prefix):]
1968
yield path, 'V', entry.kind, entry.file_id, entry
1970
if from_dir is None and include_root is True:
1971
root_entry = inventory.make_entry('directory', '',
1972
ROOT_PARENT, self.get_root_id())
1973
yield '', 'V', 'directory', root_entry.file_id, root_entry
1974
entries = self._iter_entries_for_dir(from_dir or '')
1975
for path, entry in entries:
1976
yield path, 'V', entry.kind, entry.file_id, entry
1978
def kind(self, file_id):
1979
trans_id = self._transform.trans_id_file_id(file_id)
1980
return self._transform.final_kind(trans_id)
1982
def stored_kind(self, file_id):
1983
trans_id = self._transform.trans_id_file_id(file_id)
1985
return self._transform._new_contents[trans_id]
1987
return self._transform._tree.stored_kind(file_id)
1989
def get_file_mtime(self, file_id, path=None):
1990
"""See Tree.get_file_mtime"""
1991
if not self._content_change(file_id):
1992
return self._transform._tree.get_file_mtime(file_id)
1993
return self._stat_limbo_file(file_id).st_mtime
1995
def _file_size(self, entry, stat_value):
1996
return self.get_file_size(entry.file_id)
1998
def get_file_size(self, file_id):
1999
"""See Tree.get_file_size"""
2000
if self.kind(file_id) == 'file':
2001
return self._transform._tree.get_file_size(file_id)
2005
def get_file_sha1(self, file_id, path=None, stat_value=None):
2006
trans_id = self._transform.trans_id_file_id(file_id)
2007
kind = self._transform._new_contents.get(trans_id)
2009
return self._transform._tree.get_file_sha1(file_id)
2011
fileobj = self.get_file(file_id)
2013
return sha_file(fileobj)
2017
def is_executable(self, file_id, path=None):
2020
trans_id = self._transform.trans_id_file_id(file_id)
2022
return self._transform._new_executability[trans_id]
2025
return self._transform._tree.is_executable(file_id, path)
2027
if e.errno == errno.ENOENT:
2030
except errors.NoSuchId:
2033
def path_content_summary(self, path):
2034
trans_id = self._path2trans_id(path)
2035
tt = self._transform
2036
tree_path = tt._tree_id_paths.get(trans_id)
2037
kind = tt._new_contents.get(trans_id)
2039
if tree_path is None or trans_id in tt._removed_contents:
2040
return 'missing', None, None, None
2041
summary = tt._tree.path_content_summary(tree_path)
2042
kind, size, executable, link_or_sha1 = summary
2045
limbo_name = tt._limbo_name(trans_id)
2046
if trans_id in tt._new_reference_revision:
2047
kind = 'tree-reference'
2049
statval = os.lstat(limbo_name)
2050
size = statval.st_size
2051
if not supports_executable():
2054
executable = statval.st_mode & S_IEXEC
2058
if kind == 'symlink':
2059
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2060
executable = tt._new_executability.get(trans_id, executable)
2061
return kind, size, executable, link_or_sha1
2063
def iter_changes(self, from_tree, include_unchanged=False,
2064
specific_files=None, pb=None, extra_trees=None,
2065
require_versioned=True, want_unversioned=False):
2066
"""See InterTree.iter_changes.
2068
This has a fast path that is only used when the from_tree matches
2069
the transform tree, and no fancy options are supplied.
2071
if (from_tree is not self._transform._tree or include_unchanged or
2072
specific_files or want_unversioned):
2073
return tree.InterTree(from_tree, self).iter_changes(
2074
include_unchanged=include_unchanged,
2075
specific_files=specific_files,
2077
extra_trees=extra_trees,
2078
require_versioned=require_versioned,
2079
want_unversioned=want_unversioned)
2080
if want_unversioned:
2081
raise ValueError('want_unversioned is not supported')
2082
return self._transform.iter_changes()
2084
def get_file(self, file_id, path=None):
2085
"""See Tree.get_file"""
2086
if not self._content_change(file_id):
2087
return self._transform._tree.get_file(file_id, path)
2088
trans_id = self._transform.trans_id_file_id(file_id)
2089
name = self._transform._limbo_name(trans_id)
2090
return open(name, 'rb')
2092
def get_file_with_stat(self, file_id, path=None):
2093
return self.get_file(file_id, path), None
2095
def annotate_iter(self, file_id,
2096
default_revision=_mod_revision.CURRENT_REVISION):
2097
changes = self._iter_changes_cache.get(file_id)
2101
changed_content, versioned, kind = (changes[2], changes[3],
2105
get_old = (kind[0] == 'file' and versioned[0])
2107
old_annotation = self._transform._tree.annotate_iter(file_id,
2108
default_revision=default_revision)
2112
return old_annotation
2113
if not changed_content:
2114
return old_annotation
2115
# TODO: This is doing something similar to what WT.annotate_iter is
2116
# doing, however it fails slightly because it doesn't know what
2117
# the *other* revision_id is, so it doesn't know how to give the
2118
# other as the origin for some lines, they all get
2119
# 'default_revision'
2120
# It would be nice to be able to use the new Annotator based
2121
# approach, as well.
2122
return annotate.reannotate([old_annotation],
2123
self.get_file(file_id).readlines(),
2126
def get_symlink_target(self, file_id):
2127
"""See Tree.get_symlink_target"""
2128
if not self._content_change(file_id):
2129
return self._transform._tree.get_symlink_target(file_id)
2130
trans_id = self._transform.trans_id_file_id(file_id)
2131
name = self._transform._limbo_name(trans_id)
2132
return osutils.readlink(name)
2134
def walkdirs(self, prefix=''):
2135
pending = [self._transform.root]
2136
while len(pending) > 0:
2137
parent_id = pending.pop()
2140
prefix = prefix.rstrip('/')
2141
parent_path = self._final_paths.get_path(parent_id)
2142
parent_file_id = self._transform.final_file_id(parent_id)
2143
for child_id in self._all_children(parent_id):
2144
path_from_root = self._final_paths.get_path(child_id)
2145
basename = self._transform.final_name(child_id)
2146
file_id = self._transform.final_file_id(child_id)
2148
kind = self._transform.final_kind(child_id)
2149
versioned_kind = kind
2152
versioned_kind = self._transform._tree.stored_kind(file_id)
2153
if versioned_kind == 'directory':
2154
subdirs.append(child_id)
2155
children.append((path_from_root, basename, kind, None,
2156
file_id, versioned_kind))
2158
if parent_path.startswith(prefix):
2159
yield (parent_path, parent_file_id), children
2160
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2163
def get_parent_ids(self):
2164
return self._parent_ids
2166
def set_parent_ids(self, parent_ids):
2167
self._parent_ids = parent_ids
2169
def get_revision_tree(self, revision_id):
2170
return self._transform._tree.get_revision_tree(revision_id)
523
raise NotImplementedError(self.cancel_creation)
525
def cook_conflicts(self, raw_conflicts):
528
raise NotImplementedError(self.cook_conflicts)
531
class OrphaningError(errors.BzrError):
533
# Only bugs could lead to such exception being seen by the user
534
internal_error = True
535
_fmt = "Error while orphaning %s in %s directory"
537
def __init__(self, orphan, parent):
538
errors.BzrError.__init__(self)
543
class OrphaningForbidden(OrphaningError):
545
_fmt = "Policy: %s doesn't allow creating orphans."
547
def __init__(self, policy):
548
errors.BzrError.__init__(self)
552
def move_orphan(tt, orphan_id, parent_id):
553
"""See TreeTransformBase.new_orphan.
555
This creates a new orphan in the `brz-orphans` dir at the root of the
558
:param tt: The TreeTransform orphaning `trans_id`.
560
:param orphan_id: The trans id that should be orphaned.
562
:param parent_id: The orphan parent trans id.
564
# Add the orphan dir if it doesn't exist
565
orphan_dir_basename = 'brz-orphans'
566
od_id = tt.trans_id_tree_path(orphan_dir_basename)
567
if tt.final_kind(od_id) is None:
568
tt.create_directory(od_id)
569
parent_path = tt._tree_id_paths[parent_id]
570
# Find a name that doesn't exist yet in the orphan dir
571
actual_name = tt.final_name(orphan_id)
572
new_name = tt._available_backup_name(actual_name, od_id)
573
tt.adjust_path(new_name, od_id, orphan_id)
574
trace.warning('%s has been orphaned in %s'
575
% (joinpath(parent_path, actual_name), orphan_dir_basename))
578
def refuse_orphan(tt, orphan_id, parent_id):
579
"""See TreeTransformBase.new_orphan.
581
This refuses to create orphan, letting the caller handle the conflict.
583
raise OrphaningForbidden('never')
586
orphaning_registry = registry.Registry()
587
orphaning_registry.register(
588
u'conflict', refuse_orphan,
589
'Leave orphans in place and create a conflict on the directory.')
590
orphaning_registry.register(
591
u'move', move_orphan,
592
'Move orphans into the brz-orphans directory.')
593
orphaning_registry._set_default_key(u'conflict')
opt_transform_orphan = _mod_config.RegistryOption(
    'transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
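# The orphaning policy is resolved through the normal configuration stack, so
# it can be set globally or per branch; for example, a configuration entry of
# the form (illustrative)
#
#     transform.orphan_policy = move
#
# selects move_orphan() above instead of the default conflict behaviour.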
2173
602
def joinpath(parent, child):
2209
639
return [(self.get_path(t), t) for t in trans_ids]
2213
def topology_sorted_ids(tree):
2214
"""Determine the topological order of the ids in a tree"""
2215
file_ids = list(tree)
2216
file_ids.sort(key=tree.id2path)
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
               delta_from_tree=False):
    """Create working tree for a branch, using a TreeTransform.

    This function should be used on empty trees, having a tree root at most.
    (see merge and revert functionality for working with existing trees)

    Existing files are handled like so:

    - Existing bzrdirs take precedence over creating new items.  They are
      created as '%s.diverted' % name.
    - Otherwise, if the content on disk matches the content we are building,
      it is silently replaced.
    - Otherwise, conflict resolution will move the old file to 'oldname.moved'.

    :param tree: The tree to convert wt into a copy of
    :param wt: The working tree that files will be placed into
    :param accelerator_tree: A tree which can be used for retrieving file
        contents more quickly than tree itself, i.e. a workingtree.  tree
        will be used for cases where accelerator_tree's content is different.
    :param hardlink: If true, hard-link files to accelerator_tree, where
        possible.  accelerator_tree must implement abspath, i.e. be a
        working tree.
    :param delta_from_tree: If true, build_tree may use the input Tree to
        generate the inventory delta.
    """
    wt.lock_tree_write()
    try:
        tree.lock_read()
        try:
            if accelerator_tree is not None:
                accelerator_tree.lock_read()
            try:
                return _build_tree(tree, wt, accelerator_tree, hardlink,
                                   delta_from_tree)
            finally:
                if accelerator_tree is not None:
                    accelerator_tree.unlock()
        finally:
            tree.unlock()
    finally:
        wt.unlock()
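# Sketch of a typical call, assuming `branch` exists and `wt` is a freshly
# created, still-empty working tree (names are placeholders):
#
#     revision_tree = branch.repository.revision_tree(branch.last_revision())
#     build_tree(revision_tree, wt)
#     # or, reusing content from an existing checkout where it is unchanged:
#     build_tree(revision_tree, wt, accelerator_tree=other_wt, hardlink=True)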
2264
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2265
"""See build_tree."""
2266
for num, _unused in enumerate(wt.all_file_ids()):
2267
if num > 0: # more than just a root
2268
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2269
existing_files = set()
2270
for dir, files in wt.walkdirs():
2271
existing_files.update(f[0] for f in files)
2273
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2274
pp = ProgressPhase("Build phase", 2, top_pb)
2275
if tree.inventory.root is not None:
2276
# This is kind of a hack: we should be altering the root
2277
# as part of the regular tree shape diff logic.
2278
# The conditional test here is to avoid doing an
2279
# expensive operation (flush) every time the root id
2280
# is set within the tree, nor setting the root and thus
2281
# marking the tree as dirty, because we use two different
2282
# idioms here: tree interfaces and inventory interfaces.
2283
if wt.get_root_id() != tree.get_root_id():
2284
wt.set_root_id(tree.get_root_id())
2286
tt = TreeTransform(wt)
2290
file_trans_id[wt.get_root_id()] = \
2291
tt.trans_id_tree_file_id(wt.get_root_id())
2292
pb = bzrlib.ui.ui_factory.nested_progress_bar()
2294
deferred_contents = []
2296
total = len(tree.inventory)
2298
precomputed_delta = []
2300
precomputed_delta = None
2301
for num, (tree_path, entry) in \
2302
enumerate(tree.inventory.iter_entries_by_dir()):
2303
pb.update("Building tree", num - len(deferred_contents), total)
2304
if entry.parent_id is None:
2307
file_id = entry.file_id
2309
precomputed_delta.append((None, tree_path, file_id, entry))
2310
if tree_path in existing_files:
2311
target_path = wt.abspath(tree_path)
2312
kind = file_kind(target_path)
2313
if kind == "directory":
2315
bzrdir.BzrDir.open(target_path)
2316
except errors.NotBranchError:
2320
if (file_id not in divert and
2321
_content_match(tree, entry, file_id, kind,
2323
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2324
if kind == 'directory':
2326
parent_id = file_trans_id[entry.parent_id]
2327
if entry.kind == 'file':
2328
# We *almost* replicate new_by_entry, so that we can defer
2329
# getting the file text, and get them all at once.
2330
trans_id = tt.create_path(entry.name, parent_id)
2331
file_trans_id[file_id] = trans_id
2332
tt.version_file(file_id, trans_id)
2333
executable = tree.is_executable(file_id, tree_path)
2335
tt.set_executability(executable, trans_id)
2336
trans_data = (trans_id, tree_path)
2337
deferred_contents.append((file_id, trans_data))
2339
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2342
new_trans_id = file_trans_id[file_id]
2343
old_parent = tt.trans_id_tree_path(tree_path)
2344
_reparent_children(tt, old_parent, new_trans_id)
2345
offset = num + 1 - len(deferred_contents)
2346
_create_files(tt, tree, deferred_contents, pb, offset,
2347
accelerator_tree, hardlink)
2351
divert_trans = set(file_trans_id[f] for f in divert)
2352
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2353
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2354
if len(raw_conflicts) > 0:
2355
precomputed_delta = None
2356
conflicts = cook_conflicts(raw_conflicts, tt)
2357
for conflict in conflicts:
2360
wt.add_conflicts(conflicts)
2361
except errors.UnsupportedOperation:
2363
result = tt.apply(no_conflicts=True,
2364
precomputed_delta=precomputed_delta)
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
    total = len(desired_files) + offset
    if accelerator_tree is None:
        new_desired_files = desired_files
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(f, p) for (f, p) in unchanged
                if not accelerator_tree.iter_search_rules([p]).next()]
        unchanged = dict(unchanged)
        new_desired_files = []
        for file_id, (trans_id, tree_path) in desired_files:
            accelerator_path = unchanged.get(file_id)
            if accelerator_path is None:
                new_desired_files.append((file_id, (trans_id, tree_path)))
            pb.update('Adding file contents', count + offset, total)
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                contents = accelerator_tree.get_file(file_id, accelerator_path)
                if wt.supports_content_filtering():
                    filters = wt._content_filter_stack(tree_path)
                    contents = filtered_output_bytes(contents, filters,
                        ContentFilterContext(tree_path, tree))
                    tt.create_file(contents, trans_id)
                    except AttributeError:
                        # after filtering, contents may no longer be file-like
    for count, ((trans_id, tree_path), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id)
        pb.update('Adding file contents', count + offset, total)
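
# A minimal standalone sketch of the accelerator idea used by _create_files
# above: files whose content is known to be unchanged in a second, local
# "accelerator" directory are hardlinked (or copied) from there instead of
# being re-read from the (possibly slow) source tree. All names below are
# hypothetical and not part of the bzrlib/breezy API.

import os
import shutil


def populate_files(wanted, source_dir, target_dir, accelerator_dir=None,
                   unchanged=frozenset(), hardlink=False):
    """wanted: relative paths to create; unchanged: paths identical in accelerator_dir."""
    for relpath in wanted:
        target = os.path.join(target_dir, relpath)
        os.makedirs(os.path.dirname(target), exist_ok=True)
        if accelerator_dir is not None and relpath in unchanged:
            local = os.path.join(accelerator_dir, relpath)
            if hardlink:
                os.link(local, target)       # cheap: shares the inode
            else:
                shutil.copy2(local, target)  # still avoids the slow source
        else:
            shutil.copy2(os.path.join(source_dir, relpath), target)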
def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]
def _content_match(tree, entry, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        if tree.get_file(file_id).read() == file(target_path, 'rb').read():
            return True
    elif entry.kind == "symlink":
        if tree.get_symlink_target(file_id) == os.readlink(target_path):
            return True
    return False
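
# Hedged sketch of the _content_match check above, written against plain
# filesystem primitives rather than the Tree API: an existing path only
# counts as a match if its kind and content agree with what we intend to
# create there. Purely illustrative.

import os


def content_match(kind, expected_bytes, expected_link_target, target_path):
    on_disk = ('symlink' if os.path.islink(target_path)
               else 'directory' if os.path.isdir(target_path) else 'file')
    if kind != on_disk:
        return False
    if kind == 'directory':
        return True
    if kind == 'file':
        with open(target_path, 'rb') as f:
            return f.read() == expected_bytes
    if kind == 'symlink':
        return os.readlink(target_path) == expected_link_target
    return False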
def resolve_checkout(tt, conflicts, divert):
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]
        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file)+'.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file)+'.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts
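
# The renaming policy above, reduced to a standalone sketch: for a 'duplicate'
# conflict exactly one side is renamed out of the way -- the freshly created
# item gets a '.diverted' suffix when it is in the divert set, otherwise the
# pre-existing item gets a '.moved' suffix. Names here are illustrative only.

def checkout_conflict_rename(base_name, new_is_diverted):
    """Return (which_side_moves, new_name) for a duplicate conflict."""
    if new_is_diverted:
        return 'new', base_name + '.diverted'
    return 'old', base_name + '.moved'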
def new_by_entry(tt, entry, parent_id, tree):        # (old signature)
def new_by_entry(path, tt, entry, parent_id, tree):  # (new signature)
    """Create a new file according to its inventory entry"""
    name = entry.name
    kind = entry.kind
    if kind == 'file':
        # -- old version:
        contents = tree.get_file(entry.file_id).readlines()
        executable = tree.is_executable(entry.file_id)
        return tt.new_file(name, parent_id, contents, entry.file_id,
        # -- new version:
        with tree.get_file(path) as f:
            executable = tree.is_executable(path)
                name, parent_id, osutils.file_iterator(f), entry.file_id,
    elif kind in ('directory', 'tree-reference'):
        trans_id = tt.new_directory(name, parent_id, entry.file_id)
        if kind == 'tree-reference':
            tt.set_tree_reference(entry.reference_revision, trans_id)
    elif kind == 'symlink':
        target = tree.get_symlink_target(entry.file_id)   # (old)
        target = tree.get_symlink_target(path)            # (new)
        return tt.new_symlink(name, parent_id, target, entry.file_id)
        raise errors.BadFileKindError(name, kind)
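
# A standalone analogue of new_by_entry's dispatch-on-kind, using plain os
# calls instead of a TreeTransform. Illustrative only; error handling and
# executability are reduced to the bare minimum.

import os


def materialize(kind, path, data=None, link_target=None, executable=False):
    if kind == 'file':
        with open(path, 'wb') as f:
            f.write(data or b'')
        if executable:
            os.chmod(path, 0o755)
    elif kind == 'directory':
        os.mkdir(path)
    elif kind == 'symlink':
        os.symlink(link_target, path)
    else:
        raise ValueError('unsupported kind %r' % kind)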
@deprecated_function(deprecated_in((1, 9, 0)))
def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
    """Create new file contents according to an inventory entry.

    DEPRECATED. Use create_from_tree instead.
    """
    if entry.kind == "file":
        lines = tree.get_file(entry.file_id).readlines()
        tt.create_file(lines, trans_id, mode_id=mode_id)
    elif entry.kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
    elif entry.kind == "directory":
        tt.create_directory(trans_id)
# -- old signature:
def create_from_tree(tt, trans_id, tree, file_id, bytes=None,
                     filter_tree_path=None):
# -- new signature:
def create_from_tree(tt, trans_id, tree, path, chunks=None,
                     filter_tree_path=None):
    """Create new file contents according to tree contents.

    :param filter_tree_path: the tree path to use to lookup
      content filters to apply to the bytes output in the working tree.
      This only applies if the working tree supports content filtering.
    """
    kind = tree.kind(file_id)   # (old)
    kind = tree.kind(path)      # (new)
    if kind == 'directory':
        tt.create_directory(trans_id)
    elif kind == "file":
        # -- old version:
        tree_file = tree.get_file(file_id)
        bytes = tree_file.readlines()
        if wt.supports_content_filtering() and filter_tree_path is not None:
            filters = wt._content_filter_stack(filter_tree_path)
            bytes = filtered_output_bytes(bytes, filters,
                ContentFilterContext(filter_tree_path, tree))
        tt.create_file(bytes, trans_id)
        # -- new version:
        f = tree.get_file(path)
        chunks = osutils.file_iterator(f)
        if wt.supports_content_filtering() and filter_tree_path is not None:
            filters = wt._content_filter_stack(filter_tree_path)
            chunks = filtered_output_bytes(
                ContentFilterContext(filter_tree_path, tree))
        tt.create_file(chunks, trans_id)
    elif kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(file_id), trans_id)   # (old)
        tt.create_symlink(tree.get_symlink_target(path), trans_id)      # (new)
    else:
        raise AssertionError('Unknown kind %r' % kind)


    # (body of a helper whose definition is elided in this excerpt)
    tt.set_executability(entry.executable, trans_id)
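
# The content-filtering step above, sketched as a standalone chunk pipeline:
# each filter is a callable taking an iterable of byte chunks and returning
# another iterable, and the stack is applied in order before the result is
# written out. Names here are hypothetical, not the breezy filter API.

def apply_filter_stack(chunks, filters):
    for f in filters:
        chunks = f(chunks)
    return chunks


def upper_case(chunks):
    for chunk in chunks:
        yield chunk.upper()


# Example: b"hello\n" passed through a single upper-casing filter.
filtered = b''.join(apply_filter_stack([b"hello\n"], [upper_case]))
assert filtered == b"HELLO\n"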
def get_backup_name(entry, by_parent, parent_trans_id, tt):
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)


def _get_backup_name(name, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
            # (inside the nested name generator, definition elided)
            yield "%s.~%d~" % (name, counter)
    for new_name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, new_name):
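
# _get_backup_name above probes names of the form 'name.~N~' until one is
# free. The same idea against a plain directory, as a hedged standalone
# sketch (not the bzrlib/breezy API):

import itertools
import os


def backup_name(name, directory):
    for counter in itertools.count(1):
        candidate = '%s.~%d~' % (name, counter)
        if not os.path.lexists(os.path.join(directory, candidate)):
            return candidate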
def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    working_kind = working_tree.kind(file_id)
    has_contents = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod
def _prepare_revert_transform(es, working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    with ui.ui_factory.nested_progress_bar() as child_pb:
        if merge_modified is None:
            merge_modified = working_tree.merge_modified()
        merge_modified = _alter_files(es, working_tree, target_tree, tt,
                                      child_pb, filenames, backups,
                                      merge_modified, basis_tree)
    with ui.ui_factory.nested_progress_bar() as child_pb:
        raw_conflicts = resolve_conflicts(
            tt, child_pb, lambda t, c: conflict_pass(t, c, target_tree))
    conflicts = tt.cook_conflicts(raw_conflicts)
    return conflicts, merge_modified
def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):                                       # (old)
           pb=None, change_reporter=None, merge_modified=None, basis_tree=None): # (new)
    """Revert a working tree's contents to those of a target tree."""
    # -- old version:
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    tt = TreeTransform(working_tree, pb)
    # -- new version:
    with contextlib.ExitStack() as es:
        pb = es.enter_context(ui.ui_factory.nested_progress_bar())
        es.enter_context(target_tree.lock_read())
        tt = es.enter_context(working_tree.transform(pb))
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, backups, pp)      # (old)
            es, working_tree, target_tree, tt, filenames, backups, pp)  # (new)
        if change_reporter:
            change_reporter = delta._ChangeReporter(
                unversioned_filter=working_tree.is_ignored)
            delta.report_changes(tt.iter_changes(), change_reporter)
        for conflict in conflicts:
            trace.warning(str(conflict))
        # -- old version:
        working_tree.set_merge_modified(merge_modified)
        target_tree.unlock()
        # -- new version:
        if working_tree.supports_merge_modified():
            working_tree.set_merge_modified(merge_modified)
    return conflicts
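
# Hedged usage sketch: callers normally go through WorkingTree.revert(),
# which drives the revert() function above. Driving it directly looks
# roughly like this; the path and import location are assumptions, and the
# keyword values are chosen only for illustration.

from breezy import workingtree

wt = workingtree.WorkingTree.open('.')   # tree to modify
target = wt.basis_tree()                 # revert back to the last commit
conflicts = revert(wt, target, None, backups=True)
for conflict in conflicts:
    print(conflict)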
# (older variant of _prepare_revert_transform, retained in the diff)
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    if merge_modified is None:
        merge_modified = working_tree.merge_modified()
    merge_modified = _alter_files(working_tree, target_tree, tt,
                                  child_pb, filenames, backups,
                                  merge_modified, basis_tree)
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    raw_conflicts = resolve_conflicts(tt, child_pb,
        lambda t, c: conflict_pass(t, c, target_tree))
    conflicts = cook_conflicts(raw_conflicts, tt)
    return conflicts, merge_modified
def _alter_files(working_tree, target_tree, tt, pb, specific_files,      # (old signature)
def _alter_files(es, working_tree, target_tree, tt, pb, specific_files,  # (new signature)
                 backups, merge_modified, basis_tree=None):
    if basis_tree is not None:
        # -- old version:
        basis_tree.lock_read()
    change_list = target_tree.iter_changes(working_tree,
        specific_files=specific_files, pb=pb)
    if target_tree.get_root_id() is None:
        # -- new version:
        es.enter_context(basis_tree.lock_read())
    # We ask the working_tree for its changes relative to the target, rather
    # than the target changes relative to the working tree. Because WT4 has an
    # optimizer to compare itself to a target, but no optimizer for the
    change_list = working_tree.iter_changes(
        target_tree, specific_files=specific_files, pb=pb)
    if not target_tree.is_versioned(u''):
        skip_root = True
        skip_root = False
    # -- old version of the per-change loop:
    for id_num, (file_id, path, changed_content, versioned, parent, name,
            kind, executable) in enumerate(change_list):
        if skip_root and file_id[0] is not None and parent[0] is None:
        trans_id = tt.trans_id_file_id(file_id)
        keep_content = False
        if kind[0] == 'file' and (backups or kind[1] is None):
            wt_sha1 = working_tree.get_file_sha1(file_id)
            if merge_modified.get(file_id) != wt_sha1:
                # acquire the basis tree lazily to prevent the
                # expense of accessing it when it's not needed ?
                # (Guessing, RBC, 200702)
                if basis_tree is None:
                    basis_tree = working_tree.basis_tree()
                    basis_tree.lock_read()
                if file_id in basis_tree:
                    if wt_sha1 != basis_tree.get_file_sha1(file_id):
                elif kind[1] is None and not versioned[1]:
        if kind[0] is not None:
            if not keep_content:
                tt.delete_contents(trans_id)
            elif kind[1] is not None:
                parent_trans_id = tt.trans_id_file_id(parent[0])
                by_parent = tt.by_parent()
                backup_name = _get_backup_name(name[0], by_parent,
                                               parent_trans_id, tt)
                tt.adjust_path(backup_name, parent_trans_id, trans_id)
                new_trans_id = tt.create_path(name[0], parent_trans_id)
                if versioned == (True, True):
                    tt.unversion_file(trans_id)
                    tt.version_file(file_id, new_trans_id)
                # New contents should have the same unix perms as old
                trans_id = new_trans_id
        if kind[1] in ('directory', 'tree-reference'):
            tt.create_directory(trans_id)
            if kind[1] == 'tree-reference':
                revision = target_tree.get_reference_revision(file_id,
                tt.set_tree_reference(revision, trans_id)
        elif kind[1] == 'symlink':
            tt.create_symlink(target_tree.get_symlink_target(file_id),
        elif kind[1] == 'file':
            deferred_files.append((file_id, (trans_id, mode_id)))
    # -- new version of the per-change loop:
    for id_num, change in enumerate(change_list):
        target_path, wt_path = change.path
        target_versioned, wt_versioned = change.versioned
        target_parent = change.parent_id[0]
        target_name, wt_name = change.name
        target_kind, wt_kind = change.kind
        target_executable, wt_executable = change.executable
        if skip_root and wt_path == '':
        if wt_path is not None:
            trans_id = tt.trans_id_tree_path(wt_path)
            trans_id = tt.assign_id()
        if change.changed_content:
            if wt_kind == 'file' and (backups or target_kind is None):
                wt_sha1 = working_tree.get_file_sha1(wt_path)
                if merge_modified.get(wt_path) != wt_sha1:
                    # acquire the basis tree lazily to prevent the
                    # expense of accessing it when it's not needed ?
                    # (Guessing, RBC, 200702)
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        # -- old: merge_modified bookkeeping keyed by file_id:
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(file_id)
                    if (file_id in basis_tree and new_sha1 ==
                            basis_tree.get_file_sha1(file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                        # -- new: basis lookup by path:
                        es.enter_context(basis_tree.lock_read())
                    basis_inter = InterTree.get(basis_tree, working_tree)
                    basis_path = basis_inter.find_source_path(wt_path)
                    if basis_path is None:
                        if target_kind is None and not target_versioned:
                        merge_modified[file_id] = new_sha1                     # (old)
                        if wt_sha1 != basis_tree.get_file_sha1(basis_path):    # (new)
            if wt_kind is not None:
                if not keep_content:
                    tt.delete_contents(trans_id)
                elif target_kind is not None:
                    parent_trans_id = tt.trans_id_tree_path(osutils.dirname(wt_path))
                    backup_name = tt._available_backup_name(
                        wt_name, parent_trans_id)
                    tt.adjust_path(backup_name, parent_trans_id, trans_id)
                    new_trans_id = tt.create_path(wt_name, parent_trans_id)
                    if wt_versioned and target_versioned:
                        tt.unversion_file(trans_id)
                        new_trans_id, file_id=getattr(change, 'file_id', None))
                    # New contents should have the same unix perms as old
                    trans_id = new_trans_id
            if target_kind in ('directory', 'tree-reference'):
                tt.create_directory(trans_id)
                if target_kind == 'tree-reference':
                    revision = target_tree.get_reference_revision(
                    tt.set_tree_reference(revision, trans_id)
            elif target_kind == 'symlink':
                tt.create_symlink(target_tree.get_symlink_target(
                    target_path), trans_id)
            elif target_kind == 'file':
                deferred_files.append(
                    (target_path, (trans_id, mode_id, target_path)))
                if basis_tree is None:
                    basis_tree = working_tree.basis_tree()
                    es.enter_context(basis_tree.lock_read())
                new_sha1 = target_tree.get_file_sha1(target_path)
                basis_inter = InterTree.get(basis_tree, target_tree)
                basis_path = basis_inter.find_source_path(target_path)
                if (basis_path is not None and
                        new_sha1 == basis_tree.get_file_sha1(basis_path)):
                    # If the new contents of the file match what is in basis,
                    # then there is no need to store in merge_modified.
                    if basis_path in merge_modified:
                        del merge_modified[basis_path]
                    merge_modified[target_path] = new_sha1
        # -- old version:
        # preserve the execute bit when backing up
        if keep_content and executable[0] == executable[1]:
            tt.set_executability(executable[1], trans_id)
        elif kind[1] is not None:
            raise AssertionError(kind[1])
        if versioned == (False, True):
            tt.version_file(file_id, trans_id)
        if versioned == (True, False):
            tt.unversion_file(trans_id)
        if (name[1] is not None and
                (name[0] != name[1] or parent[0] != parent[1])):
            if name[1] == '' and parent[1] is None:
                parent_trans = ROOT_PARENT
                parent_trans = tt.trans_id_file_id(parent[1])
            if parent[0] is None and versioned[0]:
                tt.adjust_root_path(name[1], parent_trans)
                tt.adjust_path(name[1], parent_trans, trans_id)
        if executable[0] != executable[1] and kind[1] == "file":
            tt.set_executability(executable[1], trans_id)
    if working_tree.supports_content_filtering():
        for index, ((trans_id, mode_id), bytes) in enumerate(
        # -- new version:
        # preserve the execute bit when backing up
        if keep_content and wt_executable == target_executable:
            tt.set_executability(target_executable, trans_id)
        elif target_kind is not None:
            raise AssertionError(target_kind)
        if not wt_versioned and target_versioned:
                trans_id, file_id=getattr(change, 'file_id', None))
        if wt_versioned and not target_versioned:
            tt.unversion_file(trans_id)
        if (target_name is not None
                and (wt_name != target_name or change.is_reparented())):
            if target_path == '':
                parent_trans = ROOT_PARENT
                parent_trans = tt.trans_id_file_id(target_parent)
            if wt_path == '' and wt_versioned:
                tt.adjust_root_path(target_name, parent_trans)
                tt.adjust_path(target_name, parent_trans, trans_id)
        if wt_executable != target_executable and target_kind == "file":
            tt.set_executability(target_executable, trans_id)
    if working_tree.supports_content_filtering():
        for (trans_id, mode_id, target_path), bytes in (
                target_tree.iter_files_bytes(deferred_files)):
            # -- old version:
            file_id = deferred_files[index][0]
            # We're reverting a tree to the target tree so using the
            # target tree to find the file path seems the best choice
            # here IMO - Ian C 27/Oct/2009
            filter_tree_path = target_tree.id2path(file_id)
            filters = working_tree._content_filter_stack(filter_tree_path)
            bytes = filtered_output_bytes(bytes, filters,
                ContentFilterContext(filter_tree_path, working_tree))
            tt.create_file(bytes, trans_id, mode_id)
    for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(       # (old)
            # -- new version:
            # We're reverting a tree to the target tree so using the
            # target tree to find the file path seems the best choice
            # here IMO - Ian C 27/Oct/2009
            filters = working_tree._content_filter_stack(target_path)
            bytes = filtered_output_bytes(
                ContentFilterContext(target_path, working_tree))
            tt.create_file(bytes, trans_id, mode_id)
    for (trans_id, mode_id, target_path), bytes in target_tree.iter_files_bytes(  # (new)
            deferred_files):
        tt.create_file(bytes, trans_id, mode_id)   # (old)
    tt.fixup_new_roots()                           # (old)
    if basis_tree is not None:                     # (old)
        tt.create_file(bytes, trans_id, mode_id)   # (new)
    return merge_modified
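
# The merge_modified bookkeeping above, reduced to a sketch: after reverting a
# file we record the sha1 of the new content, except when it is identical to
# the basis content, in which case any stale record is dropped. Standalone and
# illustrative; 'merge_modified' is just a dict of path -> sha1 here.

import hashlib


def update_merge_modified(merge_modified, path, new_bytes, basis_bytes):
    new_sha1 = hashlib.sha1(new_bytes).hexdigest()
    if basis_bytes is not None and new_sha1 == hashlib.sha1(basis_bytes).hexdigest():
        merge_modified.pop(path, None)   # matches basis: nothing to remember
    else:
        merge_modified[path] = new_sha1
    return merge_modified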
    def rollback(self):
        """Reverse all renames that have been performed"""
        for from_, to in reversed(self.past_renames):
            osutils.rename(to, from_)             # (old)
            # -- new version:
            try:
                os.rename(to, from_)
            except OSError as e:
                raise TransformRenameFailed(to, from_, str(e), e.errno)
        # after rollback, don't reuse _FileMover
        pending_deletions = None                  # (old)
        self.past_renames = None                  # (new)
        self.pending_deletions = None             # (new)

    def apply_deletions(self):
        """Apply all marked deletions"""
        for path in self.pending_deletions:
            delete_any(path)
        # after apply_deletions, don't reuse _FileMover
        pending_deletions = None                  # (old)
        self.past_renames = None                  # (new)
        self.pending_deletions = None             # (new)
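
# A minimal standalone analogue of _FileMover: record each rename so the whole
# batch can be rolled back if a later step fails. Not the breezy class, just a
# sketch of the same idea.

import os


class BatchMover(object):

    def __init__(self):
        self.past_renames = []

    def rename(self, from_, to):
        os.rename(from_, to)
        self.past_renames.append((from_, to))

    def rollback(self):
        for from_, to in reversed(self.past_renames):
            os.rename(to, from_)
        self.past_renames = None   # the mover must not be reused afterwards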
def link_tree(target_tree, source_tree):
    """Where possible, hard-link files in a tree to those in another tree.

    :param target_tree: Tree to change
    :param source_tree: Tree to hard-link from
    """
    with target_tree.transform() as tt:
        for change in target_tree.iter_changes(source_tree, include_unchanged=True):
            if change.changed_content:
                continue
            if change.kind != ('file', 'file'):
                continue
            if change.executable[0] != change.executable[1]:
                continue
            trans_id = tt.trans_id_tree_path(change.path[1])
            tt.delete_contents(trans_id)
            tt.create_hardlink(source_tree.abspath(change.path[0]), trans_id)
        tt.apply()
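
# link_tree above only hardlinks entries that are bit-for-bit identical in
# both trees. The core filesystem operation, sketched without the Tree API
# (falls back to a copy when the two paths are on different filesystems):

import os
import shutil


def hardlink_or_copy(source, target):
    if os.path.lexists(target):
        os.unlink(target)
    try:
        os.link(source, target)
    except OSError:
        shutil.copy2(source, target)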
class PreviewTree(object):

    def __init__(self, transform):
        self._transform = transform
        self._parent_ids = []
        self.__by_parent = None
        self._path2trans_id_cache = {}
        self._all_children_cache = {}
        self._final_name_cache = {}

    def supports_setting_file_ids(self):
        raise NotImplementedError(self.supports_setting_file_ids)

    def _by_parent(self):
        if self.__by_parent is None:
            self.__by_parent = self._transform.by_parent()
        return self.__by_parent

    def get_parent_ids(self):
        return self._parent_ids

    def set_parent_ids(self, parent_ids):
        self._parent_ids = parent_ids

    def get_revision_tree(self, revision_id):
        return self._transform._tree.get_revision_tree(revision_id)

    def is_locked(self):

    def lock_read(self):
        # Perhaps in theory, this should lock the TreeTransform?
        return lock.LogicalLockResult(self.unlock)

    def _path2trans_id(self, path):
        """Look up the trans id associated with a path.

        :param path: path to look up, None when the path does not exist
        # We must not use None here, because that is a valid value to store.
        trans_id = self._path2trans_id_cache.get(path, object)
        if trans_id is not object:
        segments = osutils.splitpath(path)
        cur_parent = self._transform.root
        for cur_segment in segments:
            for child in self._all_children(cur_parent):
                final_name = self._final_name_cache.get(child)
                if final_name is None:
                    final_name = self._transform.final_name(child)
                    self._final_name_cache[child] = final_name
                if final_name == cur_segment:
                self._path2trans_id_cache[path] = None
        self._path2trans_id_cache[path] = cur_parent

    def _all_children(self, trans_id):
        children = self._all_children_cache.get(trans_id)
        if children is not None:
        children = set(self._transform.iter_tree_children(trans_id))
        # children in the _new_parent set are provided by _by_parent.
        children.difference_update(self._transform._new_parent)
        children.update(self._by_parent.get(trans_id, []))
        self._all_children_cache[trans_id] = children
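
# _path2trans_id and _all_children above resolve a path one segment at a time,
# memoising both the per-directory child listing and the final answer. The
# same pattern against a plain nested-dict "tree", as an illustrative sketch:

class PathResolver(object):

    def __init__(self, root):
        # nested dicts for directories, string ids for files,
        # e.g. {'a': {'b.txt': 'id-1'}}
        self.root = root
        self._cache = {}

    def resolve(self, path):
        sentinel = object()
        cached = self._cache.get(path, sentinel)
        if cached is not sentinel:
            return cached                # a cached miss is legitimately None
        node = self.root
        for segment in path.split('/'):
            if not isinstance(node, dict) or segment not in node:
                node = None
                break
            node = node[segment]
        self._cache[path] = node
        return node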
    def get_file_with_stat(self, path):
        return self.get_file(path), None

    def is_executable(self, path):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            return self._transform._new_executability[trans_id]
                return self._transform._tree.is_executable(path)
            except OSError as e:
                if e.errno == errno.ENOENT:
            except errors.NoSuchFile:

    def has_filename(self, path):
        trans_id = self._path2trans_id(path)
        if trans_id in self._transform._new_contents:
        elif trans_id in self._transform._removed_contents:
            return self._transform._tree.has_filename(path)

    def get_file_sha1(self, path, stat_value=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        kind = self._transform._new_contents.get(trans_id)
            return self._transform._tree.get_file_sha1(path)
            with self.get_file(path) as fileobj:
                return osutils.sha_file(fileobj)

    def get_file_verifier(self, path, stat_value=None):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        kind = self._transform._new_contents.get(trans_id)
            return self._transform._tree.get_file_verifier(path)
            with self.get_file(path) as fileobj:
                return ("SHA1", osutils.sha_file(fileobj))

    def kind(self, path):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        return self._transform.final_kind(trans_id)

    def stored_kind(self, path):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
            return self._transform._new_contents[trans_id]
            return self._transform._tree.stored_kind(path)

    def _get_repository(self):
        repo = getattr(self._transform._tree, '_repository', None)
            repo = self._transform._tree.branch.repository

    def _iter_parent_trees(self):
        for revision_id in self.get_parent_ids():
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self._get_repository().revision_tree(revision_id)

    def get_file_size(self, path):
        """See Tree.get_file_size"""
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        kind = self._transform.final_kind(trans_id)
        if trans_id in self._transform._new_contents:
            return self._stat_limbo_file(trans_id).st_size
        if self.kind(path) == 'file':
            return self._transform._tree.get_file_size(path)

    def get_reference_revision(self, path):
        trans_id = self._path2trans_id(path)
        if trans_id is None:
            raise errors.NoSuchFile(path)
        reference_revision = self._transform._new_reference_revision.get(trans_id)
        if reference_revision is None:
            return self._transform._tree.get_reference_revision(path)
        return reference_revision

    def tree_kind(self, trans_id):
        path = self._tree_id_paths.get(trans_id)
        kind = self._tree.path_content_summary(path)[0]
        if kind == 'missing':