861
795
self.create_symlink(target, trans_id)
798
def new_orphan(self, trans_id, parent_id):
    """Schedule an item to be orphaned.

    When a directory is about to be removed, its children, if they are not
    versioned are moved out of the way: they don't have a parent anymore.

    :param trans_id: The trans_id of the existing item.
    :param parent_id: The parent trans_id of the item.
    """
    # Abstract hook: concrete transforms (e.g. DiskTreeTransform) supply
    # the policy-driven implementation.
    raise NotImplementedError(self.new_orphan)
809
def _get_potential_orphans(self, dir_id):
    """Find the potential orphans in a directory.

    A directory can't be safely deleted if there are versioned files in it.
    If all the contained files are unversioned then they can be orphaned.

    The 'None' return value means that the directory contains at least one
    versioned file and should not be deleted.

    :param dir_id: The directory trans id.
    :return: A list of the orphan trans ids or None if at least one
        versioned file is present.
    """
    orphans = []
    # Find the potential orphans, stop if one item should be kept
    for child_tid in self.by_parent()[dir_id]:
        if child_tid in self._removed_contents:
            # The child is removed as part of the transform. Since it was
            # versioned before, it's not an orphan
            continue
        elif self.final_file_id(child_tid) is None:
            # The child is not versioned
            orphans.append(child_tid)
        else:
            # We have a versioned file here, searching for orphans is
            # meaningless.
            orphans = None
            break
    return orphans
840
def _affected_ids(self):
    """Return the set of transform ids affected by the transform.

    The union of every trans_id mentioned by any pending change:
    removals, new file-ids, content changes, executability, renames
    and reparenting.
    """
    trans_ids = set(self._removed_id)
    trans_ids.update(self._new_id.keys())
    trans_ids.update(self._removed_contents)
    trans_ids.update(self._new_contents.keys())
    trans_ids.update(self._new_executability.keys())
    trans_ids.update(self._new_name.keys())
    trans_ids.update(self._new_parent.keys())
    return trans_ids
851
def _get_file_id_maps(self):
    """Return mapping of file_ids to trans_ids in the to and from states.

    :return: A (from_trans_ids, to_trans_ids) pair of dicts, each mapping
        file_id -> trans_id for the affected ids.
    """
    trans_ids = self._affected_ids()
    from_trans_ids = {}
    to_trans_ids = {}
    # Build up two dicts: trans_ids associated with file ids in the
    # FROM state, vs the TO state.
    for trans_id in trans_ids:
        from_file_id = self.tree_file_id(trans_id)
        if from_file_id is not None:
            from_trans_ids[from_file_id] = trans_id
        to_file_id = self.final_file_id(trans_id)
        if to_file_id is not None:
            to_trans_ids[to_file_id] = trans_id
    return from_trans_ids, to_trans_ids
867
def _from_file_data(self, from_trans_id, from_versioned, file_id):
    """Get data about a file in the from (tree) state.

    Return a (name, parent, kind, executable) tuple
    """
    from_path = self._tree_id_paths.get(from_trans_id)
    if from_versioned:
        # get data from working tree if versioned
        from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
        from_name = from_entry.name
        from_parent = from_entry.parent_id
    else:
        from_entry = None
        if from_path is None:
            # File does not exist in FROM state
            from_name = None
            from_parent = None
        else:
            # File exists, but is not versioned. Have to use path-
            # splitting stuff instead of the inventory entry.
            from_name = os.path.basename(from_path)
            tree_parent = self.get_tree_parent(from_trans_id)
            from_parent = self.tree_file_id(tree_parent)
    if from_path is not None:
        from_kind, from_executable, from_stats = \
            self._tree._comparison_data(from_entry, from_path)
    else:
        from_kind = None
        from_executable = False
    return from_name, from_parent, from_kind, from_executable
898
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
    """Get data about a file in the to (target) state.

    Return a (name, parent, kind, executable) tuple
    """
    to_name = self.final_name(to_trans_id)
    to_kind = self.final_kind(to_trans_id)
    to_parent = self.final_file_id(self.final_parent(to_trans_id))
    if to_trans_id in self._new_executability:
        to_executable = self._new_executability[to_trans_id]
    elif to_trans_id == from_trans_id:
        # Executability unchanged by the transform: carry it over.
        to_executable = from_executable
    else:
        to_executable = False
    return to_name, to_parent, to_kind, to_executable
914
def iter_changes(self):
    """Produce output in the same format as Tree.iter_changes.

    Will produce nonsensical results if invoked while inventory/filesystem
    conflicts (as reported by TreeTransform.find_conflicts()) are present.

    This reads the Transform, but only reproduces changes involving a
    file_id. Files that are not versioned in either of the FROM or TO
    states are not reflected.
    """
    final_paths = FinalPaths(self)
    from_trans_ids, to_trans_ids = self._get_file_id_maps()
    results = []
    # Now iterate through all active file_ids
    for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
        modified = False
        from_trans_id = from_trans_ids.get(file_id)
        # find file ids, and determine versioning state
        if from_trans_id is None:
            from_versioned = False
            from_trans_id = to_trans_ids[file_id]
        else:
            from_versioned = True
        to_trans_id = to_trans_ids.get(file_id)
        if to_trans_id is None:
            to_versioned = False
            to_trans_id = from_trans_id
        else:
            to_versioned = True

        from_name, from_parent, from_kind, from_executable = \
            self._from_file_data(from_trans_id, from_versioned, file_id)

        to_name, to_parent, to_kind, to_executable = \
            self._to_file_data(to_trans_id, from_trans_id, from_executable)

        if not from_versioned:
            from_path = None
        else:
            from_path = self._tree_id_paths.get(from_trans_id)
        if not to_versioned:
            to_path = None
        else:
            to_path = final_paths.get_path(to_trans_id)
        if from_kind != to_kind:
            modified = True
        elif to_kind in ('file', 'symlink') and (
            to_trans_id != from_trans_id or
            to_trans_id in self._new_contents):
            modified = True
        if (not modified and from_versioned == to_versioned and
            from_parent == to_parent and from_name == to_name and
            from_executable == to_executable):
            # Nothing changed for this file_id; don't report it.
            continue
        results.append((file_id, (from_path, to_path), modified,
               (from_versioned, to_versioned),
               (from_parent, to_parent),
               (from_name, to_name),
               (from_kind, to_kind),
               (from_executable, to_executable)))
    # Sort by (from_path, to_path) for stable, path-ordered output.
    return iter(sorted(results, key=lambda x: x[1]))
976
def get_preview_tree(self):
    """Return a tree representing the result of the transform.

    The tree is a snapshot, and altering the TreeTransform will invalidate
    it.
    """
    return _PreviewTree(self)
984
def commit(self, branch, message, merge_parents=None, strict=False,
           timestamp=None, timezone=None, committer=None, authors=None,
           revprops=None, revision_id=None):
    """Commit the result of this TreeTransform to a branch.

    :param branch: The branch to commit to.
    :param message: The message to attach to the commit.
    :param merge_parents: Additional parent revision-ids specified by
        pending merges.
    :param strict: If True, abort the commit if there are unversioned
        files.
    :param timestamp: if not None, seconds-since-epoch for the time and
        date. (May be a float.)
    :param timezone: Optional timezone for timestamp, as an offset in
        seconds.
    :param committer: Optional committer in email-id format.
        (e.g. "J Random Hacker <jrandom@example.com>")
    :param authors: Optional list of authors in email-id format.
    :param revprops: Optional dictionary of revision properties.
    :param revision_id: Optional revision id. (Specifying a revision-id
        may reduce performance for some non-native formats.)
    :return: The revision_id of the revision committed.
    """
    self._check_malformed()
    if strict:
        unversioned = set(self._new_contents).difference(set(self._new_id))
        for trans_id in unversioned:
            if self.final_file_id(trans_id) is None:
                raise errors.StrictCommitFailed()

    revno, last_rev_id = branch.last_revision_info()
    if last_rev_id == _mod_revision.NULL_REVISION:
        if merge_parents is not None:
            raise ValueError('Cannot supply merge parents for first'
                             ' commit.')
        parent_ids = []
    else:
        parent_ids = [last_rev_id]
        if merge_parents is not None:
            parent_ids.extend(merge_parents)
    if self._tree.get_revision_id() != last_rev_id:
        # The transform must be built on the branch's current basis,
        # otherwise the recorded delta would be wrong.
        raise ValueError('TreeTransform not based on branch basis: %s' %
                         self._tree.get_revision_id())
    revprops = commit.Commit.update_revprops(revprops, branch, authors)
    builder = branch.get_commit_builder(parent_ids,
                                        timestamp=timestamp,
                                        timezone=timezone,
                                        committer=committer,
                                        revprops=revprops,
                                        revision_id=revision_id)
    preview = self.get_preview_tree()
    list(builder.record_iter_changes(preview, last_rev_id,
                                     self.iter_changes()))
    builder.finish_inventory()
    revision_id = builder.commit(message)
    branch.set_last_revision_info(revno + 1, revision_id)
    return revision_id
1042
def _text_parent(self, trans_id):
    """Return the file_id usable as text-compression parent, or None.

    Only an existing versioned file of kind 'file' qualifies.
    """
    file_id = self.tree_file_id(trans_id)
    try:
        if file_id is None or self._tree.kind(file_id) != 'file':
            return None
    except errors.NoSuchFile:
        return None
    return file_id
1051
def _get_parents_texts(self, trans_id):
    """Get texts for compression parents of this file."""
    file_id = self._text_parent(trans_id)
    if file_id is None:
        return ()
    return (self._tree.get_file_text(file_id),)
1058
def _get_parents_lines(self, trans_id):
    """Get lines for compression parents of this file."""
    file_id = self._text_parent(trans_id)
    if file_id is None:
        return ()
    return (self._tree.get_file_lines(file_id),)
1065
def serialize(self, serializer):
    """Serialize this TreeTransform.

    :param serializer: A Serialiser like pack.ContainerSerializer.
    """
    # Encode unicode values so bencode only sees byte strings.
    new_name = dict((k, v.encode('utf-8')) for k, v in
                    self._new_name.items())
    new_executability = dict((k, int(v)) for k, v in
                             self._new_executability.items())
    tree_path_ids = dict((k.encode('utf-8'), v)
                         for k, v in self._tree_path_ids.items())
    attribs = {
        '_id_number': self._id_number,
        '_new_name': new_name,
        '_new_parent': self._new_parent,
        '_new_executability': new_executability,
        '_new_id': self._new_id,
        '_tree_path_ids': tree_path_ids,
        '_removed_id': list(self._removed_id),
        '_removed_contents': list(self._removed_contents),
        '_non_present_ids': self._non_present_ids,
        }
    yield serializer.bytes_record(bencode.bencode(attribs),
                                  (('attribs',),))
    for trans_id, kind in self._new_contents.items():
        if kind == 'file':
            # Store file content as a multiparent diff against the
            # tree text, so the serialized form stays compact.
            lines = osutils.chunks_to_lines(
                self._read_file_chunks(trans_id))
            parents = self._get_parents_lines(trans_id)
            mpdiff = multiparent.MultiParent.from_lines(lines, parents)
            content = ''.join(mpdiff.to_patch())
        if kind == 'directory':
            content = ''
        if kind == 'symlink':
            content = self._read_symlink_target(trans_id)
        yield serializer.bytes_record(content, ((trans_id, kind),))
1102
def deserialize(self, records):
    """Deserialize a stored TreeTransform.

    :param records: An iterable of (names, content) tuples, as per
        pack.ContainerPushParser.
    """
    # First record holds the scalar/dict attributes.
    names, content = records.next()
    attribs = bencode.bdecode(content)
    self._id_number = attribs['_id_number']
    self._new_name = dict((k, v.decode('utf-8'))
                          for k, v in attribs['_new_name'].items())
    self._new_parent = attribs['_new_parent']
    self._new_executability = dict((k, bool(v)) for k, v in
        attribs['_new_executability'].items())
    self._new_id = attribs['_new_id']
    self._r_new_id = dict((v, k) for k, v in self._new_id.items())
    self._tree_path_ids = {}
    self._tree_id_paths = {}
    for bytepath, trans_id in attribs['_tree_path_ids'].items():
        path = bytepath.decode('utf-8')
        self._tree_path_ids[path] = trans_id
        self._tree_id_paths[trans_id] = path
    self._removed_id = set(attribs['_removed_id'])
    self._removed_contents = set(attribs['_removed_contents'])
    self._non_present_ids = attribs['_non_present_ids']
    # Remaining records hold the new file/directory/symlink contents.
    for ((trans_id, kind),), content in records:
        if kind == 'file':
            mpdiff = multiparent.MultiParent.from_patch(content)
            lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
            self.create_file(lines, trans_id)
        if kind == 'directory':
            self.create_directory(trans_id)
        if kind == 'symlink':
            self.create_symlink(content.decode('utf-8'), trans_id)
1138
class DiskTreeTransform(TreeTransformBase):
1139
"""Tree transform storing its contents on disk."""
1141
def __init__(self, tree, limbodir, pb=None,
1142
case_sensitive=True):
1144
:param tree: The tree that will be transformed, but not necessarily
1146
:param limbodir: A directory where new files can be stored until
1147
they are installed in their proper places
1149
:param case_sensitive: If True, the target of the transform is
1150
case sensitive, not just case preserving.
1152
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1153
self._limbodir = limbodir
1154
self._deletiondir = None
1155
# A mapping of transform ids to their limbo filename
1156
self._limbo_files = {}
1157
# A mapping of transform ids to a set of the transform ids of children
1158
# that their limbo directory has
1159
self._limbo_children = {}
1160
# Map transform ids to maps of child filename to child transform id
1161
self._limbo_children_names = {}
1162
# List of transform ids that need to be renamed from limbo into place
1163
self._needs_rename = set()
1164
self._creation_mtime = None
1167
"""Release the working tree lock, if held, clean up limbo dir.
1169
This is required if apply has not been invoked, but can be invoked
1172
if self._tree is None:
1175
entries = [(self._limbo_name(t), t, k) for t, k in
1176
self._new_contents.iteritems()]
1177
entries.sort(reverse=True)
1178
for path, trans_id, kind in entries:
1181
delete_any(self._limbodir)
1183
# We don't especially care *why* the dir is immortal.
1184
raise ImmortalLimbo(self._limbodir)
1186
if self._deletiondir is not None:
1187
delete_any(self._deletiondir)
1189
raise errors.ImmortalPendingDeletion(self._deletiondir)
1191
TreeTransformBase.finalize(self)
1193
def _limbo_name(self, trans_id):
1194
"""Generate the limbo name of a file"""
1195
limbo_name = self._limbo_files.get(trans_id)
1196
if limbo_name is None:
1197
limbo_name = self._generate_limbo_path(trans_id)
1198
self._limbo_files[trans_id] = limbo_name
1201
def _generate_limbo_path(self, trans_id):
1202
"""Generate a limbo path using the trans_id as the relative path.
1204
This is suitable as a fallback, and when the transform should not be
1205
sensitive to the path encoding of the limbo directory.
1207
self._needs_rename.add(trans_id)
1208
return pathjoin(self._limbodir, trans_id)
1210
def adjust_path(self, name, parent, trans_id):
1211
previous_parent = self._new_parent.get(trans_id)
1212
previous_name = self._new_name.get(trans_id)
1213
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1214
if (trans_id in self._limbo_files and
1215
trans_id not in self._needs_rename):
1216
self._rename_in_limbo([trans_id])
1217
if previous_parent != parent:
1218
self._limbo_children[previous_parent].remove(trans_id)
1219
if previous_parent != parent or previous_name != name:
1220
del self._limbo_children_names[previous_parent][previous_name]
1222
def _rename_in_limbo(self, trans_ids):
1223
"""Fix limbo names so that the right final path is produced.
1225
This means we outsmarted ourselves-- we tried to avoid renaming
1226
these files later by creating them with their final names in their
1227
final parents. But now the previous name or parent is no longer
1228
suitable, so we have to rename them.
1230
Even for trans_ids that have no new contents, we must remove their
1231
entries from _limbo_files, because they are now stale.
1233
for trans_id in trans_ids:
1234
old_path = self._limbo_files.pop(trans_id)
1235
if trans_id not in self._new_contents:
1237
new_path = self._limbo_name(trans_id)
1238
os.rename(old_path, new_path)
1239
for descendant in self._limbo_descendants(trans_id):
1240
desc_path = self._limbo_files[descendant]
1241
desc_path = new_path + desc_path[len(old_path):]
1242
self._limbo_files[descendant] = desc_path
1244
def _limbo_descendants(self, trans_id):
1245
"""Return the set of trans_ids whose limbo paths descend from this."""
1246
descendants = set(self._limbo_children.get(trans_id, []))
1247
for descendant in list(descendants):
1248
descendants.update(self._limbo_descendants(descendant))
1251
def create_file(self, contents, trans_id, mode_id=None):
1252
"""Schedule creation of a new file.
1256
Contents is an iterator of strings, all of which will be written
1257
to the target destination.
1259
New file takes the permissions of any existing file with that id,
1260
unless mode_id is specified.
1262
name = self._limbo_name(trans_id)
1263
f = open(name, 'wb')
1266
unique_add(self._new_contents, trans_id, 'file')
1268
# Clean up the file, it never got registered so
1269
# TreeTransform.finalize() won't clean it up.
1274
f.writelines(contents)
1277
self._set_mtime(name)
1278
self._set_mode(trans_id, mode_id, S_ISREG)
1280
def _read_file_chunks(self, trans_id):
1281
cur_file = open(self._limbo_name(trans_id), 'rb')
1283
return cur_file.readlines()
1287
def _read_symlink_target(self, trans_id):
1288
return os.readlink(self._limbo_name(trans_id))
1290
def _set_mtime(self, path):
1291
"""All files that are created get the same mtime.
1293
This time is set by the first object to be created.
1295
if self._creation_mtime is None:
1296
self._creation_mtime = time.time()
1297
os.utime(path, (self._creation_mtime, self._creation_mtime))
1299
def create_hardlink(self, path, trans_id):
1300
"""Schedule creation of a hard link"""
1301
name = self._limbo_name(trans_id)
1305
if e.errno != errno.EPERM:
1307
raise errors.HardLinkNotSupported(path)
1309
unique_add(self._new_contents, trans_id, 'file')
1311
# Clean up the file, it never got registered so
1312
# TreeTransform.finalize() won't clean it up.
1316
def create_directory(self, trans_id):
1317
"""Schedule creation of a new directory.
1319
See also new_directory.
1321
os.mkdir(self._limbo_name(trans_id))
1322
unique_add(self._new_contents, trans_id, 'directory')
1324
def create_symlink(self, target, trans_id):
1325
"""Schedule creation of a new symbolic link.
1327
target is a bytestring.
1328
See also new_symlink.
1331
os.symlink(target, self._limbo_name(trans_id))
1332
unique_add(self._new_contents, trans_id, 'symlink')
1335
path = FinalPaths(self).get_path(trans_id)
1338
raise UnableCreateSymlink(path=path)
1340
def cancel_creation(self, trans_id):
1341
"""Cancel the creation of new file contents."""
1342
del self._new_contents[trans_id]
1343
children = self._limbo_children.get(trans_id)
1344
# if this is a limbo directory with children, move them before removing
1346
if children is not None:
1347
self._rename_in_limbo(children)
1348
del self._limbo_children[trans_id]
1349
del self._limbo_children_names[trans_id]
1350
delete_any(self._limbo_name(trans_id))
1352
def new_orphan(self, trans_id, parent_id):
1353
# FIXME: There is no tree config, so we use the branch one (it's weird
1354
# to define it this way as orphaning can only occur in a working tree,
1355
# but that's all we have (for now). It will find the option in
1356
# locations.conf or bazaar.conf though) -- vila 20100916
1357
conf = self._tree.branch.get_config()
1358
conf_var_name = 'bzr.transform.orphan_policy'
1359
orphan_policy = conf.get_user_option(conf_var_name)
1360
default_policy = orphaning_registry.default_key
1361
if orphan_policy is None:
1362
orphan_policy = default_policy
1363
if orphan_policy not in orphaning_registry:
1364
trace.warning('%s (from %s) is not a known policy, defaulting to %s'
1365
% (orphan_policy, conf_var_name, default_policy))
1366
orphan_policy = default_policy
1367
handle_orphan = orphaning_registry.get(orphan_policy)
1368
handle_orphan(self, trans_id, parent_id)
1371
class OrphaningError(errors.BzrError):
    """Base error for failures while orphaning an item."""

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
1383
class OrphaningForbidden(OrphaningError):
    """Raised when the active orphaning policy refuses to orphan."""

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1392
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.
    :param orphan_id: The trans id that should be orphaned.
    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1418
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1426
# Registry of orphaning policies, keyed by the value of the
# 'bzr.transform.orphan_policy' configuration option.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
1436
class TreeTransform(DiskTreeTransform):
1437
"""Represent a tree transformation.
1439
This object is designed to support incremental generation of the transform,
1442
However, it gives optimum performance when parent directories are created
1443
before their contents. The transform is then able to put child files
1444
directly in their parent directory, avoiding later renames.
1446
It is easy to produce malformed transforms, but they are generally
1447
harmless. Attempting to apply a malformed transform will cause an
1448
exception to be raised before any modifications are made to the tree.
1450
Many kinds of malformed transforms can be corrected with the
1451
resolve_conflicts function. The remaining ones indicate programming error,
1452
such as trying to create a file with no path.
1454
Two sets of file creation methods are supplied. Convenience methods are:
1459
These are composed of the low-level methods:
1461
* create_file or create_directory or create_symlink
1465
Transform/Transaction ids
1466
-------------------------
1467
trans_ids are temporary ids assigned to all files involved in a transform.
1468
It's possible, even common, that not all files in the Tree have trans_ids.
1470
trans_ids are used because filenames and file_ids are not good enough
1471
identifiers; filenames change, and not all files have file_ids. File-ids
1472
are also associated with trans-ids, so that moving a file moves its
1475
trans_ids are only valid for the TreeTransform that generated them.
1479
Limbo is a temporary directory use to hold new versions of files.
1480
Files are added to limbo by create_file, create_directory, create_symlink,
1481
and their convenience variants (new_*). Files may be removed from limbo
1482
using cancel_creation. Files are renamed from limbo into their final
1483
location as part of TreeTransform.apply
1485
Limbo must be cleaned up, by either calling TreeTransform.apply or
1486
calling TreeTransform.finalize.
1488
Files are placed into limbo inside their parent directories, where
1489
possible. This reduces subsequent renames, and makes operations involving
1490
lots of files faster. This optimization is only possible if the parent
1491
directory is created *before* creating any of its children, so avoid
1492
creating children before parents, where possible.
1496
This temporary directory is used by _FileMover for storing files that are
1497
about to be deleted. In case of rollback, the files will be restored.
1498
FileMover does not delete files until it is sure that a rollback will not
1501
def __init__(self, tree, pb=None):
1502
"""Note: a tree_write lock is taken on the tree.
1504
Use TreeTransform.finalize() to release the lock (can be omitted if
1505
TreeTransform.apply() called).
1507
tree.lock_tree_write()
1510
limbodir = urlutils.local_path_from_url(
1511
tree._transport.abspath('limbo'))
1515
if e.errno == errno.EEXIST:
1516
raise ExistingLimbo(limbodir)
1517
deletiondir = urlutils.local_path_from_url(
1518
tree._transport.abspath('pending-deletion'))
1520
os.mkdir(deletiondir)
1522
if e.errno == errno.EEXIST:
1523
raise errors.ExistingPendingDeletion(deletiondir)
1528
# Cache of realpath results, to speed up canonical_path
1529
self._realpaths = {}
1530
# Cache of relpath results, to speed up canonical_path
1532
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1533
tree.case_sensitive)
1534
self._deletiondir = deletiondir
1536
def canonical_path(self, path):
    """Get the canonical tree-relative path"""
    # don't follow final symlinks
    abs = self._tree.abspath(path)
    if abs in self._relpaths:
        return self._relpaths[abs]
    dirname, basename = os.path.split(abs)
    if dirname not in self._realpaths:
        self._realpaths[dirname] = os.path.realpath(dirname)
    dirname = self._realpaths[dirname]
    abs = pathjoin(dirname, basename)
    if dirname in self._relpaths:
        relpath = pathjoin(self._relpaths[dirname], basename)
        relpath = relpath.rstrip('/\\')
    else:
        relpath = self._tree.relpath(abs)
    self._relpaths[abs] = relpath
    return relpath
1555
def tree_kind(self, trans_id):
    """Determine the file kind in the working tree.

    :returns: The file kind or None if the file does not exist
    """
    path = self._tree_id_paths.get(trans_id)
    if path is None:
        return None
    try:
        return file_kind(self._tree.abspath(path))
    except errors.NoSuchFile:
        return None
1568
def _set_mode(self, trans_id, mode_id, typefunc):
1569
"""Set the mode of new file contents.
1570
The mode_id is the existing file to get the mode from (often the same
1571
as trans_id). The operation is only performed if there's a mode match
1572
according to typefunc.
1577
old_path = self._tree_id_paths[mode_id]
1581
mode = os.stat(self._tree.abspath(old_path)).st_mode
1583
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1584
# Either old_path doesn't exist, or the parent of the
1585
# target is not a directory (but will be one eventually)
1586
# Either way, we know it doesn't exist *right now*
1587
# See also bug #248448
1592
os.chmod(self._limbo_name(trans_id), mode)
1594
def iter_tree_children(self, parent_id):
1595
"""Iterate through the entry's tree children, if any"""
1597
path = self._tree_id_paths[parent_id]
1601
children = os.listdir(self._tree.abspath(path))
1603
if not (osutils._is_error_enotdir(e)
1604
or e.errno in (errno.ENOENT, errno.ESRCH)):
1608
for child in children:
1609
childpath = joinpath(path, child)
1610
if self._tree.is_control_filename(childpath):
1612
yield self.trans_id_tree_path(childpath)
1614
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the final path if possible.

    This optimizes the performance of applying the tree transform by
    avoiding renames. These renames can be avoided only when the parent
    directory is already scheduled for creation.

    If the final path cannot be used, falls back to using the trans_id as
    the relpath.
    """
    parent = self._new_parent.get(trans_id)
    # if the parent directory is already in limbo (e.g. when building a
    # tree), choose a limbo name inside the parent, to reduce further
    # renames.
    use_direct_path = False
    if self._new_contents.get(parent) == 'directory':
        filename = self._new_name.get(trans_id)
        if filename is not None:
            if parent not in self._limbo_children:
                self._limbo_children[parent] = set()
                self._limbo_children_names[parent] = {}
                use_direct_path = True
            # the direct path can only be used if no other file has
            # already taken this pathname, i.e. if the name is unused, or
            # if it is already associated with this trans_id.
            elif self._case_sensitive_target:
                if (self._limbo_children_names[parent].get(filename)
                    in (trans_id, None)):
                    use_direct_path = True
            else:
                # Case-insensitive target: any case-folded collision
                # forces the fallback path.
                for l_filename, l_trans_id in\
                    self._limbo_children_names[parent].iteritems():
                    if l_trans_id == trans_id:
                        continue
                    if l_filename.lower() == filename.lower():
                        break
                else:
                    use_direct_path = True

    if not use_direct_path:
        return DiskTreeTransform._generate_limbo_path(self, trans_id)

    limbo_name = pathjoin(self._limbo_files[parent], filename)
    self._limbo_children[parent].add(trans_id)
    self._limbo_children_names[parent][filename] = trans_id
    return limbo_name
1662
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
    """Apply all changes to the inventory and filesystem.

    If filesystem or inventory conflicts are present, MalformedTransform
    will be thrown.

    If apply succeeds, finalize is not necessary.

    :param no_conflicts: if True, the caller guarantees there are no
        conflicts, so no check is made.
    :param precomputed_delta: An inventory delta to use instead of
        calculating one.
    :param _mover: Supply an alternate FileMover, for testing
    """
    if not no_conflicts:
        self._check_malformed()
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    try:
        if precomputed_delta is None:
            child_pb.update('Apply phase', 0, 2)
            inventory_delta = self._generate_inventory_delta()
            offset = 1
        else:
            inventory_delta = precomputed_delta
            offset = 0
        if _mover is None:
            mover = _FileMover()
        else:
            mover = _mover
        try:
            child_pb.update('Apply phase', 0 + offset, 2 + offset)
            self._apply_removals(mover)
            child_pb.update('Apply phase', 1 + offset, 2 + offset)
            modified_paths = self._apply_insertions(mover)
        except:
            # Put everything back the way it was before failing.
            mover.rollback()
            raise
        else:
            mover.apply_deletions()
    finally:
        child_pb.finished()
    self._tree.apply_inventory_delta(inventory_delta)
    self._done = True
    self.finalize()
    return _TransformResults(modified_paths, self.rename_count)
1708
def _generate_inventory_delta(self):
    """Generate an inventory delta for the current transform."""
    inventory_delta = []
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    new_paths = self._inventory_altered()
    total_entries = len(new_paths) + len(self._removed_id)
    try:
        for num, trans_id in enumerate(self._removed_id):
            if (num % 10) == 0:
                child_pb.update('removing file', num, total_entries)
            if trans_id == self._new_root:
                file_id = self._tree.get_root_id()
            else:
                file_id = self.tree_file_id(trans_id)
            # File-id isn't really being deleted, just moved
            if file_id in self._r_new_id:
                continue
            path = self._tree_id_paths[trans_id]
            inventory_delta.append((path, None, file_id, None))
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                 new_paths)
        entries = self._tree.iter_entries_by_dir(
            new_path_file_ids.values())
        old_paths = dict((e.file_id, p) for p, e in entries)
        for num, (path, trans_id) in enumerate(new_paths):
            if (num % 10) == 0:
                child_pb.update('adding file',
                                num + len(self._removed_id), total_entries)
            file_id = new_path_file_ids[trans_id]
            if file_id is None:
                continue
            kind = self.final_kind(trans_id)
            if kind is None:
                kind = self._tree.stored_kind(file_id)
            parent_trans_id = self.final_parent(trans_id)
            parent_file_id = new_path_file_ids.get(parent_trans_id)
            if parent_file_id is None:
                parent_file_id = self.final_file_id(parent_trans_id)
            if trans_id in self._new_reference_revision:
                new_entry = inventory.TreeReference(
                    file_id,
                    self._new_name[trans_id],
                    self.final_file_id(self._new_parent[trans_id]),
                    None, self._new_reference_revision[trans_id])
            else:
                new_entry = inventory.make_entry(kind,
                    self.final_name(trans_id),
                    parent_file_id, file_id)
            old_path = old_paths.get(new_entry.file_id)
            new_executability = self._new_executability.get(trans_id)
            if new_executability is not None:
                new_entry.executable = new_executability
            inventory_delta.append(
                (old_path, path, new_entry.file_id, new_entry))
    finally:
        child_pb.finished()
    return inventory_delta
1768
def _apply_removals(self, mover):
1769
"""Perform tree operations that remove directory/inventory names.
1771
That is, delete files that are to be deleted, and put any files that
1772
need renaming into limbo. This must be done in strict child-to-parent
1775
If inventory_delta is None, no inventory delta generation is performed.
1777
tree_paths = list(self._tree_path_ids.iteritems())
1778
tree_paths.sort(reverse=True)
1779
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1781
for num, data in enumerate(tree_paths):
1782
path, trans_id = data
1783
child_pb.update('removing file', num, len(tree_paths))
1784
full_path = self._tree.abspath(path)
1785
if trans_id in self._removed_contents:
1786
delete_path = os.path.join(self._deletiondir, trans_id)
1787
mover.pre_delete(full_path, delete_path)
1788
elif (trans_id in self._new_name
1789
or trans_id in self._new_parent):
1791
mover.rename(full_path, self._limbo_name(trans_id))
1792
except errors.TransformRenameFailed, e:
1793
if e.errno != errno.ENOENT:
1796
self.rename_count += 1
1800
def _apply_insertions(self, mover):
1801
"""Perform tree operations that insert directory/inventory names.
1803
That is, create any files that need to be created, and restore from
1804
limbo any files that needed renaming. This must be done in strict
1805
parent-to-child order.
1807
If inventory_delta is None, no inventory delta is calculated, and
1808
no list of modified paths is returned.
1810
new_paths = self.new_paths(filesystem_only=True)
1812
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1814
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1816
for num, (path, trans_id) in enumerate(new_paths):
1818
child_pb.update('adding file', num, len(new_paths))
1819
full_path = self._tree.abspath(path)
1820
if trans_id in self._needs_rename:
1822
mover.rename(self._limbo_name(trans_id), full_path)
1823
except errors.TransformRenameFailed, e:
1824
# We may be renaming a dangling inventory id
1825
if e.errno != errno.ENOENT:
1828
self.rename_count += 1
1829
if (trans_id in self._new_contents or
1830
self.path_changed(trans_id)):
1831
if trans_id in self._new_contents:
1832
modified_paths.append(full_path)
1833
if trans_id in self._new_executability:
1834
self._set_executability(path, trans_id)
1837
self._new_contents.clear()
1838
return modified_paths
1841
class TransformPreview(DiskTreeTransform):
1842
"""A TreeTransform for generating preview trees.
1844
Unlike TreeTransform, this version works when the input tree is a
1845
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1846
unversioned files in the input tree.
1849
def __init__(self, tree, pb=None, case_sensitive=True):
1851
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1852
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1854
def canonical_path(self, path):
1857
def tree_kind(self, trans_id):
1858
path = self._tree_id_paths.get(trans_id)
1861
file_id = self._tree.path2id(path)
1863
return self._tree.kind(file_id)
1864
except errors.NoSuchFile:
1867
def _set_mode(self, trans_id, mode_id, typefunc):
1868
"""Set the mode of new file contents.
1869
The mode_id is the existing file to get the mode from (often the same
1870
as trans_id). The operation is only performed if there's a mode match
1871
according to typefunc.
1873
# is it ok to ignore this? probably
1876
def iter_tree_children(self, parent_id):
1877
"""Iterate through the entry's tree children, if any"""
1879
path = self._tree_id_paths[parent_id]
1882
file_id = self.tree_file_id(parent_id)
1885
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1886
children = getattr(entry, 'children', {})
1887
for child in children:
1888
childpath = joinpath(path, child)
1889
yield self.trans_id_tree_path(childpath)
1891
def new_orphan(self, trans_id, parent_id):
1892
raise NotImplementedError(self.new_orphan)
1895
class _PreviewTree(tree.Tree):
1896
"""Partial implementation of Tree to support show_diff_trees"""
1898
def __init__(self, transform):
1899
self._transform = transform
1900
self._final_paths = FinalPaths(transform)
1901
self.__by_parent = None
1902
self._parent_ids = []
1903
self._all_children_cache = {}
1904
self._path2trans_id_cache = {}
1905
self._final_name_cache = {}
1906
self._iter_changes_cache = dict((c[0], c) for c in
1907
self._transform.iter_changes())
1909
def _content_change(self, file_id):
1910
"""Return True if the content of this file changed"""
1911
changes = self._iter_changes_cache.get(file_id)
1912
# changes[2] is true if the file content changed. See
1913
# InterTree.iter_changes.
1914
return (changes is not None and changes[2])
1916
def _get_repository(self):
1917
repo = getattr(self._transform._tree, '_repository', None)
1919
repo = self._transform._tree.branch.repository
1922
def _iter_parent_trees(self):
1923
for revision_id in self.get_parent_ids():
1925
yield self.revision_tree(revision_id)
1926
except errors.NoSuchRevisionInTree:
1927
yield self._get_repository().revision_tree(revision_id)
1929
def _get_file_revision(self, file_id, vf, tree_revision):
1930
parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
1931
self._iter_parent_trees()]
1932
vf.add_lines((file_id, tree_revision), parent_keys,
1933
self.get_file_lines(file_id))
1934
repo = self._get_repository()
1935
base_vf = repo.texts
1936
if base_vf not in vf.fallback_versionedfiles:
1937
vf.fallback_versionedfiles.append(base_vf)
1938
return tree_revision
1940
def _stat_limbo_file(self, file_id):
1941
trans_id = self._transform.trans_id_file_id(file_id)
1942
name = self._transform._limbo_name(trans_id)
1943
return os.lstat(name)
1946
def _by_parent(self):
1947
if self.__by_parent is None:
1948
self.__by_parent = self._transform.by_parent()
1949
return self.__by_parent
1951
def _comparison_data(self, entry, path):
1952
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
1953
if kind == 'missing':
1957
file_id = self._transform.final_file_id(self._path2trans_id(path))
1958
executable = self.is_executable(file_id, path)
1959
return kind, executable, None
1961
def is_locked(self):
1964
def lock_read(self):
1965
# Perhaps in theory, this should lock the TreeTransform?
1972
def inventory(self):
1973
"""This Tree does not use inventory as its backing data."""
1974
raise NotImplementedError(_PreviewTree.inventory)
1976
def get_root_id(self):
1977
return self._transform.final_file_id(self._transform.root)
1979
def all_file_ids(self):
1980
tree_ids = set(self._transform._tree.all_file_ids())
1981
tree_ids.difference_update(self._transform.tree_file_id(t)
1982
for t in self._transform._removed_id)
1983
tree_ids.update(self._transform._new_id.values())
1987
return iter(self.all_file_ids())
1989
def _has_id(self, file_id, fallback_check):
1990
if file_id in self._transform._r_new_id:
1992
elif file_id in set([self._transform.tree_file_id(trans_id) for
1993
trans_id in self._transform._removed_id]):
1996
return fallback_check(file_id)
1998
def has_id(self, file_id):
1999
return self._has_id(file_id, self._transform._tree.has_id)
2001
def has_or_had_id(self, file_id):
2002
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2004
def _path2trans_id(self, path):
2005
# We must not use None here, because that is a valid value to store.
2006
trans_id = self._path2trans_id_cache.get(path, object)
2007
if trans_id is not object:
2009
segments = splitpath(path)
2010
cur_parent = self._transform.root
2011
for cur_segment in segments:
2012
for child in self._all_children(cur_parent):
2013
final_name = self._final_name_cache.get(child)
2014
if final_name is None:
2015
final_name = self._transform.final_name(child)
2016
self._final_name_cache[child] = final_name
2017
if final_name == cur_segment:
2021
self._path2trans_id_cache[path] = None
2023
self._path2trans_id_cache[path] = cur_parent
2026
def path2id(self, path):
2027
return self._transform.final_file_id(self._path2trans_id(path))
2029
def id2path(self, file_id):
2030
trans_id = self._transform.trans_id_file_id(file_id)
2032
return self._final_paths._determine_path(trans_id)
2034
raise errors.NoSuchId(self, file_id)
2036
def _all_children(self, trans_id):
2037
children = self._all_children_cache.get(trans_id)
2038
if children is not None:
2040
children = set(self._transform.iter_tree_children(trans_id))
2041
# children in the _new_parent set are provided by _by_parent.
2042
children.difference_update(self._transform._new_parent.keys())
2043
children.update(self._by_parent.get(trans_id, []))
2044
self._all_children_cache[trans_id] = children
2047
def iter_children(self, file_id):
2048
trans_id = self._transform.trans_id_file_id(file_id)
2049
for child_trans_id in self._all_children(trans_id):
2050
yield self._transform.final_file_id(child_trans_id)
2053
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2054
in self._transform._tree.extras())
2055
possible_extras.update(self._transform._new_contents)
2056
possible_extras.update(self._transform._removed_id)
2057
for trans_id in possible_extras:
2058
if self._transform.final_file_id(trans_id) is None:
2059
yield self._final_paths._determine_path(trans_id)
2061
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2062
yield_parents=False):
2063
for trans_id, parent_file_id in ordered_entries:
2064
file_id = self._transform.final_file_id(trans_id)
2067
if (specific_file_ids is not None
2068
and file_id not in specific_file_ids):
2070
kind = self._transform.final_kind(trans_id)
2072
kind = self._transform._tree.stored_kind(file_id)
2073
new_entry = inventory.make_entry(
2075
self._transform.final_name(trans_id),
2076
parent_file_id, file_id)
2077
yield new_entry, trans_id
2079
def _list_files_by_dir(self):
2080
todo = [ROOT_PARENT]
2082
while len(todo) > 0:
2084
parent_file_id = self._transform.final_file_id(parent)
2085
children = list(self._all_children(parent))
2086
paths = dict(zip(children, self._final_paths.get_paths(children)))
2087
children.sort(key=paths.get)
2088
todo.extend(reversed(children))
2089
for trans_id in children:
2090
ordered_ids.append((trans_id, parent_file_id))
2093
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2094
# This may not be a maximally efficient implementation, but it is
2095
# reasonably straightforward. An implementation that grafts the
2096
# TreeTransform changes onto the tree's iter_entries_by_dir results
2097
# might be more efficient, but requires tricky inferences about stack
2099
ordered_ids = self._list_files_by_dir()
2100
for entry, trans_id in self._make_inv_entries(ordered_ids,
2101
specific_file_ids, yield_parents=yield_parents):
2102
yield unicode(self._final_paths.get_path(trans_id)), entry
2104
def _iter_entries_for_dir(self, dir_path):
2105
"""Return path, entry for items in a directory without recursing down."""
2106
dir_file_id = self.path2id(dir_path)
2108
for file_id in self.iter_children(dir_file_id):
2109
trans_id = self._transform.trans_id_file_id(file_id)
2110
ordered_ids.append((trans_id, file_id))
2111
for entry, trans_id in self._make_inv_entries(ordered_ids):
2112
yield unicode(self._final_paths.get_path(trans_id)), entry
2114
def list_files(self, include_root=False, from_dir=None, recursive=True):
2115
"""See WorkingTree.list_files."""
2116
# XXX This should behave like WorkingTree.list_files, but is really
2117
# more like RevisionTree.list_files.
2121
prefix = from_dir + '/'
2122
entries = self.iter_entries_by_dir()
2123
for path, entry in entries:
2124
if entry.name == '' and not include_root:
2127
if not path.startswith(prefix):
2129
path = path[len(prefix):]
2130
yield path, 'V', entry.kind, entry.file_id, entry
2132
if from_dir is None and include_root is True:
2133
root_entry = inventory.make_entry('directory', '',
2134
ROOT_PARENT, self.get_root_id())
2135
yield '', 'V', 'directory', root_entry.file_id, root_entry
2136
entries = self._iter_entries_for_dir(from_dir or '')
2137
for path, entry in entries:
2138
yield path, 'V', entry.kind, entry.file_id, entry
2140
def kind(self, file_id):
2141
trans_id = self._transform.trans_id_file_id(file_id)
2142
return self._transform.final_kind(trans_id)
2144
def stored_kind(self, file_id):
2145
trans_id = self._transform.trans_id_file_id(file_id)
2147
return self._transform._new_contents[trans_id]
2149
return self._transform._tree.stored_kind(file_id)
2151
def get_file_mtime(self, file_id, path=None):
2152
"""See Tree.get_file_mtime"""
2153
if not self._content_change(file_id):
2154
return self._transform._tree.get_file_mtime(file_id)
2155
return self._stat_limbo_file(file_id).st_mtime
2157
def _file_size(self, entry, stat_value):
2158
return self.get_file_size(entry.file_id)
2160
def get_file_size(self, file_id):
2161
"""See Tree.get_file_size"""
2162
if self.kind(file_id) == 'file':
2163
return self._transform._tree.get_file_size(file_id)
2167
def get_file_sha1(self, file_id, path=None, stat_value=None):
2168
trans_id = self._transform.trans_id_file_id(file_id)
2169
kind = self._transform._new_contents.get(trans_id)
2171
return self._transform._tree.get_file_sha1(file_id)
2173
fileobj = self.get_file(file_id)
2175
return sha_file(fileobj)
2179
def is_executable(self, file_id, path=None):
2182
trans_id = self._transform.trans_id_file_id(file_id)
2184
return self._transform._new_executability[trans_id]
2187
return self._transform._tree.is_executable(file_id, path)
2189
if e.errno == errno.ENOENT:
2192
except errors.NoSuchId:
2195
def path_content_summary(self, path):
2196
trans_id = self._path2trans_id(path)
2197
tt = self._transform
2198
tree_path = tt._tree_id_paths.get(trans_id)
2199
kind = tt._new_contents.get(trans_id)
2201
if tree_path is None or trans_id in tt._removed_contents:
2202
return 'missing', None, None, None
2203
summary = tt._tree.path_content_summary(tree_path)
2204
kind, size, executable, link_or_sha1 = summary
2207
limbo_name = tt._limbo_name(trans_id)
2208
if trans_id in tt._new_reference_revision:
2209
kind = 'tree-reference'
2211
statval = os.lstat(limbo_name)
2212
size = statval.st_size
2213
if not supports_executable():
2216
executable = statval.st_mode & S_IEXEC
2220
if kind == 'symlink':
2221
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2222
executable = tt._new_executability.get(trans_id, executable)
2223
return kind, size, executable, link_or_sha1
2225
def iter_changes(self, from_tree, include_unchanged=False,
2226
specific_files=None, pb=None, extra_trees=None,
2227
require_versioned=True, want_unversioned=False):
2228
"""See InterTree.iter_changes.
2230
This has a fast path that is only used when the from_tree matches
2231
the transform tree, and no fancy options are supplied.
2233
if (from_tree is not self._transform._tree or include_unchanged or
2234
specific_files or want_unversioned):
2235
return tree.InterTree(from_tree, self).iter_changes(
2236
include_unchanged=include_unchanged,
2237
specific_files=specific_files,
2239
extra_trees=extra_trees,
2240
require_versioned=require_versioned,
2241
want_unversioned=want_unversioned)
2242
if want_unversioned:
2243
raise ValueError('want_unversioned is not supported')
2244
return self._transform.iter_changes()
2246
def get_file(self, file_id, path=None):
2247
"""See Tree.get_file"""
2248
if not self._content_change(file_id):
2249
return self._transform._tree.get_file(file_id, path)
2250
trans_id = self._transform.trans_id_file_id(file_id)
2251
name = self._transform._limbo_name(trans_id)
2252
return open(name, 'rb')
2254
def get_file_with_stat(self, file_id, path=None):
2255
return self.get_file(file_id, path), None
2257
def annotate_iter(self, file_id,
2258
default_revision=_mod_revision.CURRENT_REVISION):
2259
changes = self._iter_changes_cache.get(file_id)
2263
changed_content, versioned, kind = (changes[2], changes[3],
2267
get_old = (kind[0] == 'file' and versioned[0])
2269
old_annotation = self._transform._tree.annotate_iter(file_id,
2270
default_revision=default_revision)
2274
return old_annotation
2275
if not changed_content:
2276
return old_annotation
2277
# TODO: This is doing something similar to what WT.annotate_iter is
2278
# doing, however it fails slightly because it doesn't know what
2279
# the *other* revision_id is, so it doesn't know how to give the
2280
# other as the origin for some lines, they all get
2281
# 'default_revision'
2282
# It would be nice to be able to use the new Annotator based
2283
# approach, as well.
2284
return annotate.reannotate([old_annotation],
2285
self.get_file(file_id).readlines(),
2288
def get_symlink_target(self, file_id):
2289
"""See Tree.get_symlink_target"""
2290
if not self._content_change(file_id):
2291
return self._transform._tree.get_symlink_target(file_id)
2292
trans_id = self._transform.trans_id_file_id(file_id)
2293
name = self._transform._limbo_name(trans_id)
2294
return osutils.readlink(name)
2296
def walkdirs(self, prefix=''):
2297
pending = [self._transform.root]
2298
while len(pending) > 0:
2299
parent_id = pending.pop()
2302
prefix = prefix.rstrip('/')
2303
parent_path = self._final_paths.get_path(parent_id)
2304
parent_file_id = self._transform.final_file_id(parent_id)
2305
for child_id in self._all_children(parent_id):
2306
path_from_root = self._final_paths.get_path(child_id)
2307
basename = self._transform.final_name(child_id)
2308
file_id = self._transform.final_file_id(child_id)
2309
kind = self._transform.final_kind(child_id)
2310
if kind is not None:
2311
versioned_kind = kind
2314
versioned_kind = self._transform._tree.stored_kind(file_id)
2315
if versioned_kind == 'directory':
2316
subdirs.append(child_id)
2317
children.append((path_from_root, basename, kind, None,
2318
file_id, versioned_kind))
2320
if parent_path.startswith(prefix):
2321
yield (parent_path, parent_file_id), children
2322
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2325
def get_parent_ids(self):
2326
return self._parent_ids
2328
def set_parent_ids(self, parent_ids):
2329
self._parent_ids = parent_ids
2331
def get_revision_tree(self, revision_id):
2332
return self._transform._tree.get_revision_tree(revision_id)
864
2335
def joinpath(parent, child):
865
2336
"""Join tree-relative paths, handling the tree root specially"""
866
2337
if parent is None or parent == "":
896
2367
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2368
return self._known_paths[trans_id]
2370
def get_paths(self, trans_ids):
2371
return [(self.get_path(t), t) for t in trans_ids]
899
2375
def topology_sorted_ids(tree):
900
2376
"""Determine the topological order of the ids in a tree"""
901
2377
file_ids = list(tree)
902
2378
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2382
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2383
delta_from_tree=False):
2384
"""Create working tree for a branch, using a TreeTransform.
2386
This function should be used on empty trees, having a tree root at most.
2387
(see merge and revert functionality for working with existing trees)
2389
Existing files are handled like so:
2391
- Existing bzrdirs take precedence over creating new items. They are
2392
created as '%s.diverted' % name.
2393
- Otherwise, if the content on disk matches the content we are building,
2394
it is silently replaced.
2395
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2397
:param tree: The tree to convert wt into a copy of
2398
:param wt: The working tree that files will be placed into
2399
:param accelerator_tree: A tree which can be used for retrieving file
2400
contents more quickly than tree itself, i.e. a workingtree. tree
2401
will be used for cases where accelerator_tree's content is different.
2402
:param hardlink: If true, hard-link files to accelerator_tree, where
2403
possible. accelerator_tree must implement abspath, i.e. be a
2405
:param delta_from_tree: If true, build_tree may use the input Tree to
2406
generate the inventory delta.
2408
wt.lock_tree_write()
2412
if accelerator_tree is not None:
2413
accelerator_tree.lock_read()
2415
return _build_tree(tree, wt, accelerator_tree, hardlink,
2418
if accelerator_tree is not None:
2419
accelerator_tree.unlock()
2426
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2427
"""See build_tree."""
2428
for num, _unused in enumerate(wt.all_file_ids()):
2429
if num > 0: # more than just a root
2430
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2431
file_trans_id = {}
908
2432
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
909
2433
pp = ProgressPhase("Build phase", 2, top_pb)
2434
if tree.inventory.root is not None:
2435
# This is kind of a hack: we should be altering the root
2436
# as part of the regular tree shape diff logic.
2437
# The conditional test here is to avoid doing an
2438
# expensive operation (flush) every time the root id
2439
# is set within the tree, nor setting the root and thus
2440
# marking the tree as dirty, because we use two different
2441
# idioms here: tree interfaces and inventory interfaces.
2442
if wt.get_root_id() != tree.get_root_id():
2443
wt.set_root_id(tree.get_root_id())
910
2445
tt = TreeTransform(wt)
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
2449
file_trans_id[wt.get_root_id()] = \
2450
tt.trans_id_tree_file_id(wt.get_root_id())
915
2451
pb = bzrlib.ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2453
deferred_contents = []
2455
total = len(tree.inventory)
2457
precomputed_delta = []
2459
precomputed_delta = None
2460
# Check if tree inventory has content. If so, we populate
2461
# existing_files with the directory content. If there are no
2462
# entries we skip populating existing_files as its not used.
2463
# This improves performance and unncessary work on large
2464
# directory trees. (#501307)
2466
existing_files = set()
2467
for dir, files in wt.walkdirs():
2468
existing_files.update(f[0] for f in files)
2469
for num, (tree_path, entry) in \
2470
enumerate(tree.inventory.iter_entries_by_dir()):
2471
pb.update("Building tree", num - len(deferred_contents), total)
920
2472
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2475
file_id = entry.file_id
2477
precomputed_delta.append((None, tree_path, file_id, entry))
2478
if tree_path in existing_files:
2479
target_path = wt.abspath(tree_path)
2480
kind = file_kind(target_path)
2481
if kind == "directory":
2483
bzrdir.BzrDir.open(target_path)
2484
except errors.NotBranchError:
2488
if (file_id not in divert and
2489
_content_match(tree, entry, file_id, kind,
2491
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2492
if kind == 'directory':
924
2494
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2495
if entry.kind == 'file':
2496
# We *almost* replicate new_by_entry, so that we can defer
2497
# getting the file text, and get them all at once.
2498
trans_id = tt.create_path(entry.name, parent_id)
2499
file_trans_id[file_id] = trans_id
2500
tt.version_file(file_id, trans_id)
2501
executable = tree.is_executable(file_id, tree_path)
2503
tt.set_executability(executable, trans_id)
2504
trans_data = (trans_id, tree_path)
2505
deferred_contents.append((file_id, trans_data))
2507
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2510
new_trans_id = file_trans_id[file_id]
2511
old_parent = tt.trans_id_tree_path(tree_path)
2512
_reparent_children(tt, old_parent, new_trans_id)
2513
offset = num + 1 - len(deferred_contents)
2514
_create_files(tt, tree, deferred_contents, pb, offset,
2515
accelerator_tree, hardlink)
2519
divert_trans = set(file_trans_id[f] for f in divert)
2520
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2521
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2522
if len(raw_conflicts) > 0:
2523
precomputed_delta = None
2524
conflicts = cook_conflicts(raw_conflicts, tt)
2525
for conflict in conflicts:
2528
wt.add_conflicts(conflicts)
2529
except errors.UnsupportedOperation:
2531
result = tt.apply(no_conflicts=True,
2532
precomputed_delta=precomputed_delta)
933
2535
top_pb.finished()
2539
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2541
total = len(desired_files) + offset
2543
if accelerator_tree is None:
2544
new_desired_files = desired_files
2546
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2547
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2548
in iter if not (c or e[0] != e[1])]
2549
if accelerator_tree.supports_content_filtering():
2550
unchanged = [(f, p) for (f, p) in unchanged
2551
if not accelerator_tree.iter_search_rules([p]).next()]
2552
unchanged = dict(unchanged)
2553
new_desired_files = []
2555
for file_id, (trans_id, tree_path) in desired_files:
2556
accelerator_path = unchanged.get(file_id)
2557
if accelerator_path is None:
2558
new_desired_files.append((file_id, (trans_id, tree_path)))
2560
pb.update('Adding file contents', count + offset, total)
2562
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2565
contents = accelerator_tree.get_file(file_id, accelerator_path)
2566
if wt.supports_content_filtering():
2567
filters = wt._content_filter_stack(tree_path)
2568
contents = filtered_output_bytes(contents, filters,
2569
ContentFilterContext(tree_path, tree))
2571
tt.create_file(contents, trans_id)
2575
except AttributeError:
2576
# after filtering, contents may no longer be file-like
2580
for count, ((trans_id, tree_path), contents) in enumerate(
2581
tree.iter_files_bytes(new_desired_files)):
2582
if wt.supports_content_filtering():
2583
filters = wt._content_filter_stack(tree_path)
2584
contents = filtered_output_bytes(contents, filters,
2585
ContentFilterContext(tree_path, tree))
2586
tt.create_file(contents, trans_id)
2587
pb.update('Adding file contents', count + offset, total)
2590
def _reparent_children(tt, old_parent, new_parent):
2591
for child in tt.iter_tree_children(old_parent):
2592
tt.adjust_path(tt.final_name(child), new_parent, child)
2595
def _reparent_transform_children(tt, old_parent, new_parent):
2596
by_parent = tt.by_parent()
2597
for child in by_parent[old_parent]:
2598
tt.adjust_path(tt.final_name(child), new_parent, child)
2599
return by_parent[old_parent]
2602
def _content_match(tree, entry, file_id, kind, target_path):
2603
if entry.kind != kind:
2605
if entry.kind == "directory":
2607
if entry.kind == "file":
2608
f = file(target_path, 'rb')
2610
if tree.get_file_text(file_id) == f.read():
2614
elif entry.kind == "symlink":
2615
if tree.get_symlink_target(file_id) == os.readlink(target_path):
2620
def resolve_checkout(tt, conflicts, divert):
2621
new_conflicts = set()
2622
for c_type, conflict in ((c[0], c) for c in conflicts):
2623
# Anything but a 'duplicate' would indicate programmer error
2624
if c_type != 'duplicate':
2625
raise AssertionError(c_type)
2626
# Now figure out which is new and which is old
2627
if tt.new_contents(conflict[1]):
2628
new_file = conflict[1]
2629
old_file = conflict[2]
2631
new_file = conflict[2]
2632
old_file = conflict[1]
2634
# We should only get here if the conflict wasn't completely
2636
final_parent = tt.final_parent(old_file)
2637
if new_file in divert:
2638
new_name = tt.final_name(old_file)+'.diverted'
2639
tt.adjust_path(new_name, final_parent, new_file)
2640
new_conflicts.add((c_type, 'Diverted to',
2641
new_file, old_file))
2643
new_name = tt.final_name(old_file)+'.moved'
2644
tt.adjust_path(new_name, final_parent, old_file)
2645
new_conflicts.add((c_type, 'Moved existing file to',
2646
old_file, new_file))
2647
return new_conflicts
935
2650
def new_by_entry(tt, entry, parent_id, tree):
936
2651
"""Create a new file according to its inventory entry"""
1046
2735
working_kind = working_tree.kind(file_id)
1047
2736
has_contents = True
1049
if e.errno != errno.ENOENT:
1051
2738
has_contents = False
1052
2739
contents_mod = True
1053
2740
meta_mod = False
1054
2741
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
2742
if entry.kind != working_kind:
1059
2743
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
2745
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
2747
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
2748
cur_entry._forget_tree_state()
1065
2749
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2752
def revert(working_tree, target_tree, filenames, backups=False,
2753
pb=None, change_reporter=None):
1070
2754
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
2755
target_tree.lock_read()
2756
pb = ui.ui_factory.nested_progress_bar()
1075
2757
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2759
pp = ProgressPhase("Revert phase", 3, pb)
2760
conflicts, merge_modified = _prepare_revert_transform(
2761
working_tree, target_tree, tt, filenames, backups, pp)
2763
change_reporter = delta._ChangeReporter(
2764
unversioned_filter=working_tree.is_ignored)
2765
delta.report_changes(tt.iter_changes(), change_reporter)
2766
for conflict in conflicts:
2770
working_tree.set_merge_modified(merge_modified)
2772
target_tree.unlock()
2778
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2779
backups, pp, basis_tree=None,
2780
merge_modified=None):
2781
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2783
if merge_modified is None:
2784
merge_modified = working_tree.merge_modified()
2785
merge_modified = _alter_files(working_tree, target_tree, tt,
2786
child_pb, filenames, backups,
2787
merge_modified, basis_tree)
2790
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2792
raw_conflicts = resolve_conflicts(tt, child_pb,
2793
lambda t, c: conflict_pass(t, c, target_tree))
2796
conflicts = cook_conflicts(raw_conflicts, tt)
2797
return conflicts, merge_modified
2800
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2801
backups, merge_modified, basis_tree=None):
2802
if basis_tree is not None:
2803
basis_tree.lock_read()
2804
change_list = target_tree.iter_changes(working_tree,
2805
specific_files=specific_files, pb=pb)
2806
if target_tree.get_root_id() is None:
2812
for id_num, (file_id, path, changed_content, versioned, parent, name,
2813
kind, executable) in enumerate(change_list):
2814
if skip_root and file_id[0] is not None and parent[0] is None:
2816
trans_id = tt.trans_id_file_id(file_id)
2819
keep_content = False
2820
if kind[0] == 'file' and (backups or kind[1] is None):
2821
wt_sha1 = working_tree.get_file_sha1(file_id)
2822
if merge_modified.get(file_id) != wt_sha1:
2823
# acquire the basis tree lazily to prevent the
2824
# expense of accessing it when it's not needed ?
2825
# (Guessing, RBC, 200702)
2826
if basis_tree is None:
2827
basis_tree = working_tree.basis_tree()
2828
basis_tree.lock_read()
2829
if file_id in basis_tree:
2830
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2832
elif kind[1] is None and not versioned[1]:
2834
if kind[0] is not None:
2835
if not keep_content:
1121
2836
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2837
elif kind[1] is not None:
2838
parent_trans_id = tt.trans_id_file_id(parent[0])
2839
backup_name = tt._available_backup_name(
2840
name[0], parent_trans_id)
2841
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2842
new_trans_id = tt.create_path(name[0], parent_trans_id)
2843
if versioned == (True, True):
2844
tt.unversion_file(trans_id)
2845
tt.version_file(file_id, new_trans_id)
2846
# New contents should have the same unix perms as old
2849
trans_id = new_trans_id
2850
if kind[1] in ('directory', 'tree-reference'):
2851
tt.create_directory(trans_id)
2852
if kind[1] == 'tree-reference':
2853
revision = target_tree.get_reference_revision(file_id,
2855
tt.set_tree_reference(revision, trans_id)
2856
elif kind[1] == 'symlink':
2857
tt.create_symlink(target_tree.get_symlink_target(file_id),
2859
elif kind[1] == 'file':
2860
deferred_files.append((file_id, (trans_id, mode_id)))
2861
if basis_tree is None:
2862
basis_tree = working_tree.basis_tree()
2863
basis_tree.lock_read()
2864
new_sha1 = target_tree.get_file_sha1(file_id)
2865
if (file_id in basis_tree and new_sha1 ==
2866
basis_tree.get_file_sha1(file_id)):
2867
if file_id in merge_modified:
2868
del merge_modified[file_id]
2870
merge_modified[file_id] = new_sha1
2872
# preserve the execute bit when backing up
2873
if keep_content and executable[0] == executable[1]:
2874
tt.set_executability(executable[1], trans_id)
2875
elif kind[1] is not None:
2876
raise AssertionError(kind[1])
2877
if versioned == (False, True):
2878
tt.version_file(file_id, trans_id)
2879
if versioned == (True, False):
2880
tt.unversion_file(trans_id)
2881
if (name[1] is not None and
2882
(name[0] != name[1] or parent[0] != parent[1])):
2883
if name[1] == '' and parent[1] is None:
2884
parent_trans = ROOT_PARENT
2886
parent_trans = tt.trans_id_file_id(parent[1])
2887
if parent[0] is None and versioned[0]:
2888
tt.adjust_root_path(name[1], parent_trans)
2890
tt.adjust_path(name[1], parent_trans, trans_id)
2891
if executable[0] != executable[1] and kind[1] == "file":
2892
tt.set_executability(executable[1], trans_id)
2893
if working_tree.supports_content_filtering():
2894
for index, ((trans_id, mode_id), bytes) in enumerate(
2895
target_tree.iter_files_bytes(deferred_files)):
2896
file_id = deferred_files[index][0]
2897
# We're reverting a tree to the target tree so using the
2898
# target tree to find the file path seems the best choice
2899
# here IMO - Ian C 27/Oct/2009
2900
filter_tree_path = target_tree.id2path(file_id)
2901
filters = working_tree._content_filter_stack(filter_tree_path)
2902
bytes = filtered_output_bytes(bytes, filters,
2903
ContentFilterContext(filter_tree_path, working_tree))
2904
tt.create_file(bytes, trans_id, mode_id)
2906
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
2908
tt.create_file(bytes, trans_id, mode_id)
2909
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
2911
if basis_tree is not None:
2913
return merge_modified
2916
def resolve_conflicts(tt, pb=None, pass_func=None):
1144
2917
"""Make many conflict-resolution attempts, but die if they fail"""
2918
if pass_func is None:
2919
pass_func = conflict_pass
1145
2920
new_conflicts = set()
2921
pb = ui.ui_factory.nested_progress_bar()
1147
2923
for n in range(10):
1148
2924
pb.update('Resolution pass', n+1, 10)
1149
2925
conflicts = tt.find_conflicts()
1150
2926
if len(conflicts) == 0:
1151
2927
return new_conflicts
1152
new_conflicts.update(conflict_pass(tt, conflicts))
2928
new_conflicts.update(pass_func(tt, conflicts))
1153
2929
raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
2934
def conflict_pass(tt, conflicts, path_tree=None):
2935
"""Resolve some classes of conflicts.
2937
:param tt: The transform to resolve conflicts in
2938
:param conflicts: The conflicts to resolve
2939
:param path_tree: A Tree to get supplemental paths from
1160
2941
new_conflicts = set()
1161
2942
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
2943
if c_type == 'duplicate id':