        self.create_symlink(target, trans_id)
        return trans_id

    def new_orphan(self, trans_id, parent_id):
        """Schedule an item to be orphaned.

        When a directory is about to be removed, its children, if they are not
        versioned, are moved out of the way: they don't have a parent anymore.

        :param trans_id: The trans_id of the existing item.
        :param parent_id: The parent trans_id of the item.
        """
        raise NotImplementedError(self.new_orphan)
810
    def _get_potential_orphans(self, dir_id):
        """Find the potential orphans in a directory.

        A directory can't be safely deleted if there are versioned files in it.
        If all the contained files are unversioned then they can be orphaned.

        The 'None' return value means that the directory contains at least one
        versioned file and should not be deleted.

        :param dir_id: The directory trans id.

        :return: A list of the orphan trans ids or None if at least one
            versioned file is present.
        """
        orphans = []
        # Find the potential orphans, stop if one item should be kept
        for child_tid in self.by_parent()[dir_id]:
            if child_tid in self._removed_contents:
                # The child is removed as part of the transform. Since it was
                # versioned before, it's not an orphan
                continue
            elif self.final_file_id(child_tid) is None:
                # The child is not versioned
                orphans.append(child_tid)
            else:
                # We have a versioned file here, searching for orphans is
                # meaningless.
                orphans = None
                break
        return orphans
    def _affected_ids(self):
        """Return the set of transform ids affected by the transform"""
        trans_ids = set(self._removed_id)
        trans_ids.update(self._new_id.keys())
        trans_ids.update(self._removed_contents)
        trans_ids.update(self._new_contents.keys())
        trans_ids.update(self._new_executability.keys())
        trans_ids.update(self._new_name.keys())
        trans_ids.update(self._new_parent.keys())
        return trans_ids
    def _get_file_id_maps(self):
        """Return mapping of file_ids to trans_ids in the to and from states"""
        trans_ids = self._affected_ids()
        from_trans_ids = {}
        to_trans_ids = {}
        # Build up two dicts: trans_ids associated with file ids in the
        # FROM state, vs the TO state.
        for trans_id in trans_ids:
            from_file_id = self.tree_file_id(trans_id)
            if from_file_id is not None:
                from_trans_ids[from_file_id] = trans_id
            to_file_id = self.final_file_id(trans_id)
            if to_file_id is not None:
                to_trans_ids[to_file_id] = trans_id
        return from_trans_ids, to_trans_ids
868
def _from_file_data(self, from_trans_id, from_versioned, file_id):
869
"""Get data about a file in the from (tree) state
871
Return a (name, parent, kind, executable) tuple
873
from_path = self._tree_id_paths.get(from_trans_id)
875
# get data from working tree if versioned
876
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
877
from_name = from_entry.name
878
from_parent = from_entry.parent_id
881
if from_path is None:
882
# File does not exist in FROM state
886
# File exists, but is not versioned. Have to use path-
888
from_name = os.path.basename(from_path)
889
tree_parent = self.get_tree_parent(from_trans_id)
890
from_parent = self.tree_file_id(tree_parent)
891
if from_path is not None:
892
from_kind, from_executable, from_stats = \
893
self._tree._comparison_data(from_entry, from_path)
896
from_executable = False
897
return from_name, from_parent, from_kind, from_executable
899
    def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
        """Get data about a file in the to (target) state

        Return a (name, parent, kind, executable) tuple
        """
        to_name = self.final_name(to_trans_id)
        to_kind = self.final_kind(to_trans_id)
        to_parent = self.final_file_id(self.final_parent(to_trans_id))
        if to_trans_id in self._new_executability:
            to_executable = self._new_executability[to_trans_id]
        elif to_trans_id == from_trans_id:
            to_executable = from_executable
        else:
            to_executable = False
        return to_name, to_parent, to_kind, to_executable
915
    def iter_changes(self):
        """Produce output in the same format as Tree.iter_changes.

        Will produce nonsensical results if invoked while inventory/filesystem
        conflicts (as reported by TreeTransform.find_conflicts()) are present.

        This reads the Transform, but only reproduces changes involving a
        file_id.  Files that are not versioned in either of the FROM or TO
        states are not reflected.
        """
        final_paths = FinalPaths(self)
        from_trans_ids, to_trans_ids = self._get_file_id_maps()
        results = []
        # Now iterate through all active file_ids
        for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
931
from_trans_id = from_trans_ids.get(file_id)
932
# find file ids, and determine versioning state
933
if from_trans_id is None:
934
from_versioned = False
935
from_trans_id = to_trans_ids[file_id]
937
from_versioned = True
938
to_trans_id = to_trans_ids.get(file_id)
939
if to_trans_id is None:
941
to_trans_id = from_trans_id
945
from_name, from_parent, from_kind, from_executable = \
946
self._from_file_data(from_trans_id, from_versioned, file_id)
948
to_name, to_parent, to_kind, to_executable = \
949
self._to_file_data(to_trans_id, from_trans_id, from_executable)
951
if not from_versioned:
954
from_path = self._tree_id_paths.get(from_trans_id)
958
to_path = final_paths.get_path(to_trans_id)
959
if from_kind != to_kind:
961
elif to_kind in ('file', 'symlink') and (
962
to_trans_id != from_trans_id or
963
to_trans_id in self._new_contents):
965
if (not modified and from_versioned == to_versioned and
966
from_parent==to_parent and from_name == to_name and
967
from_executable == to_executable):
969
results.append((file_id, (from_path, to_path), modified,
970
(from_versioned, to_versioned),
971
(from_parent, to_parent),
972
(from_name, to_name),
973
(from_kind, to_kind),
974
(from_executable, to_executable)))
975
return iter(sorted(results, key=lambda x:x[1]))
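        # Illustrative sketch (not part of the original module): each tuple
        # yielded above follows the Tree.iter_changes layout, so a caller
        # could consume the transform's pending changes like this (the
        # variable `tt` is a hypothetical TreeTransform built elsewhere):
        #
        #     for (file_id, (from_path, to_path), changed_content, versioned,
        #          parent, name, kind, executable) in tt.iter_changes():
        #         if changed_content:
        #             trace.mutter('%s: content differs (%s -> %s)',
        #                          file_id, from_path, to_path)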
977
def get_preview_tree(self):
978
"""Return a tree representing the result of the transform.
980
The tree is a snapshot, and altering the TreeTransform will invalidate
983
return _PreviewTree(self)
985
    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date.  (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id.  (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
        self._check_malformed()
        if strict:
            unversioned = set(self._new_contents).difference(set(self._new_id))
            for trans_id in unversioned:
                if self.final_file_id(trans_id) is None:
                    raise errors.StrictCommitFailed()
        revno, last_rev_id = branch.last_revision_info()
        if last_rev_id == _mod_revision.NULL_REVISION:
            if merge_parents is not None:
                raise ValueError('Cannot supply merge parents for first'
                                 ' commit.')
            parent_ids = []
        else:
            parent_ids = [last_rev_id]
            if merge_parents is not None:
                parent_ids.extend(merge_parents)
            if self._tree.get_revision_id() != last_rev_id:
                raise ValueError('TreeTransform not based on branch basis: %s' %
                                 self._tree.get_revision_id())
revprops = commit.Commit.update_revprops(revprops, branch, authors)
        builder = branch.get_commit_builder(parent_ids,
                                            timestamp=timestamp,
                                            timezone=timezone,
                                            committer=committer,
                                            revprops=revprops,
                                            revision_id=revision_id)
1035
preview = self.get_preview_tree()
1036
list(builder.record_iter_changes(preview, last_rev_id,
1037
self.iter_changes()))
1038
builder.finish_inventory()
        revision_id = builder.commit(message)
        branch.set_last_revision_info(revno + 1, revision_id)
        return revision_id
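        # Illustrative sketch (not part of the original module), assuming a
        # branch whose basis tree is used to seed a preview transform; the
        # file name and file id below are purely for illustration:
        #
        #     tt = TransformPreview(branch.basis_tree())
        #     try:
        #         tt.new_file('NEWS', tt.root, ['initial news\n'], 'news-id')
        #         rev_id = tt.commit(branch, 'add NEWS')
        #     finally:
        #         tt.finalize()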
1043
def _text_parent(self, trans_id):
1044
file_id = self.tree_file_id(trans_id)
1046
if file_id is None or self._tree.kind(file_id) != 'file':
1048
except errors.NoSuchFile:
1052
def _get_parents_texts(self, trans_id):
1053
"""Get texts for compression parents of this file."""
1054
file_id = self._text_parent(trans_id)
1057
return (self._tree.get_file_text(file_id),)
1059
def _get_parents_lines(self, trans_id):
1060
"""Get lines for compression parents of this file."""
1061
file_id = self._text_parent(trans_id)
1064
return (self._tree.get_file_lines(file_id),)
1066
def serialize(self, serializer):
1067
"""Serialize this TreeTransform.
1069
:param serializer: A Serialiser like pack.ContainerSerializer.
1071
new_name = dict((k, v.encode('utf-8')) for k, v in
1072
self._new_name.items())
1073
new_executability = dict((k, int(v)) for k, v in
1074
self._new_executability.items())
1075
tree_path_ids = dict((k.encode('utf-8'), v)
1076
for k, v in self._tree_path_ids.items())
1078
'_id_number': self._id_number,
1079
'_new_name': new_name,
1080
'_new_parent': self._new_parent,
1081
'_new_executability': new_executability,
1082
'_new_id': self._new_id,
1083
'_tree_path_ids': tree_path_ids,
1084
'_removed_id': list(self._removed_id),
1085
'_removed_contents': list(self._removed_contents),
1086
'_non_present_ids': self._non_present_ids,
1088
yield serializer.bytes_record(bencode.bencode(attribs),
1090
for trans_id, kind in self._new_contents.items():
1092
lines = osutils.chunks_to_lines(
1093
self._read_file_chunks(trans_id))
1094
parents = self._get_parents_lines(trans_id)
1095
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1096
content = ''.join(mpdiff.to_patch())
1097
if kind == 'directory':
1099
if kind == 'symlink':
1100
content = self._read_symlink_target(trans_id)
1101
yield serializer.bytes_record(content, ((trans_id, kind),))
1103
def deserialize(self, records):
1104
"""Deserialize a stored TreeTransform.
1106
:param records: An iterable of (names, content) tuples, as per
1107
pack.ContainerPushParser.
1109
names, content = records.next()
1110
attribs = bencode.bdecode(content)
1111
self._id_number = attribs['_id_number']
1112
self._new_name = dict((k, v.decode('utf-8'))
1113
for k, v in attribs['_new_name'].items())
1114
self._new_parent = attribs['_new_parent']
1115
self._new_executability = dict((k, bool(v)) for k, v in
1116
attribs['_new_executability'].items())
1117
self._new_id = attribs['_new_id']
1118
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1119
self._tree_path_ids = {}
1120
self._tree_id_paths = {}
1121
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1122
path = bytepath.decode('utf-8')
1123
self._tree_path_ids[path] = trans_id
1124
self._tree_id_paths[trans_id] = path
1125
self._removed_id = set(attribs['_removed_id'])
1126
self._removed_contents = set(attribs['_removed_contents'])
1127
self._non_present_ids = attribs['_non_present_ids']
1128
for ((trans_id, kind),), content in records:
1130
mpdiff = multiparent.MultiParent.from_patch(content)
1131
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1132
self.create_file(lines, trans_id)
1133
if kind == 'directory':
1134
self.create_directory(trans_id)
1135
if kind == 'symlink':
1136
self.create_symlink(content.decode('utf-8'), trans_id)
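        # Illustrative sketch (not part of the original module): serialize()
        # and deserialize() are meant to round-trip through a pack container.
        # Assuming bzrlib.pack's container serialiser and parser, and a
        # hypothetical destination path, writing looks roughly like:
        #
        #     from bzrlib import pack
        #     serializer = pack.ContainerSerialiser()
        #     out = open('preview.pack', 'wb')     # hypothetical destination
        #     try:
        #         out.write(serializer.begin())
        #         for bytes in tt.serialize(serializer):
        #             out.write(bytes)
        #         out.write(serializer.end())
        #     finally:
        #         out.close()
        #
        # and reading feeds the parsed (names, content) records from a
        # ContainerPushParser back into deserialize() on a fresh transform.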
1139
class DiskTreeTransform(TreeTransformBase):
1140
"""Tree transform storing its contents on disk."""
1142
def __init__(self, tree, limbodir, pb=None,
1143
case_sensitive=True):
1145
:param tree: The tree that will be transformed, but not necessarily
1147
:param limbodir: A directory where new files can be stored until
1148
they are installed in their proper places
1150
:param case_sensitive: If True, the target of the transform is
1151
case sensitive, not just case preserving.
1153
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1154
self._limbodir = limbodir
1155
self._deletiondir = None
1156
# A mapping of transform ids to their limbo filename
1157
self._limbo_files = {}
1158
# A mapping of transform ids to a set of the transform ids of children
1159
# that their limbo directory has
1160
self._limbo_children = {}
1161
# Map transform ids to maps of child filename to child transform id
1162
self._limbo_children_names = {}
1163
# List of transform ids that need to be renamed from limbo into place
1164
self._needs_rename = set()
1165
self._creation_mtime = None
1168
"""Release the working tree lock, if held, clean up limbo dir.
1170
This is required if apply has not been invoked, but can be invoked
1173
if self._tree is None:
1176
entries = [(self._limbo_name(t), t, k) for t, k in
1177
self._new_contents.iteritems()]
1178
entries.sort(reverse=True)
1179
for path, trans_id, kind in entries:
1182
delete_any(self._limbodir)
1184
# We don't especially care *why* the dir is immortal.
1185
raise ImmortalLimbo(self._limbodir)
1187
if self._deletiondir is not None:
1188
delete_any(self._deletiondir)
1190
raise errors.ImmortalPendingDeletion(self._deletiondir)
1192
TreeTransformBase.finalize(self)
1194
def _limbo_name(self, trans_id):
1195
"""Generate the limbo name of a file"""
1196
limbo_name = self._limbo_files.get(trans_id)
1197
if limbo_name is None:
1198
limbo_name = self._generate_limbo_path(trans_id)
1199
self._limbo_files[trans_id] = limbo_name
1202
def _generate_limbo_path(self, trans_id):
1203
"""Generate a limbo path using the trans_id as the relative path.
1205
This is suitable as a fallback, and when the transform should not be
1206
sensitive to the path encoding of the limbo directory.
1208
self._needs_rename.add(trans_id)
1209
return pathjoin(self._limbodir, trans_id)
1211
def adjust_path(self, name, parent, trans_id):
1212
previous_parent = self._new_parent.get(trans_id)
1213
previous_name = self._new_name.get(trans_id)
1214
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1215
if (trans_id in self._limbo_files and
1216
trans_id not in self._needs_rename):
1217
self._rename_in_limbo([trans_id])
1218
if previous_parent != parent:
1219
self._limbo_children[previous_parent].remove(trans_id)
1220
if previous_parent != parent or previous_name != name:
1221
del self._limbo_children_names[previous_parent][previous_name]
1223
def _rename_in_limbo(self, trans_ids):
1224
"""Fix limbo names so that the right final path is produced.
1226
This means we outsmarted ourselves-- we tried to avoid renaming
1227
these files later by creating them with their final names in their
1228
final parents. But now the previous name or parent is no longer
1229
suitable, so we have to rename them.
1231
Even for trans_ids that have no new contents, we must remove their
1232
entries from _limbo_files, because they are now stale.
1234
for trans_id in trans_ids:
1235
old_path = self._limbo_files.pop(trans_id)
1236
if trans_id not in self._new_contents:
1238
new_path = self._limbo_name(trans_id)
1239
os.rename(old_path, new_path)
1240
for descendant in self._limbo_descendants(trans_id):
1241
desc_path = self._limbo_files[descendant]
1242
desc_path = new_path + desc_path[len(old_path):]
1243
self._limbo_files[descendant] = desc_path
1245
def _limbo_descendants(self, trans_id):
1246
"""Return the set of trans_ids whose limbo paths descend from this."""
1247
descendants = set(self._limbo_children.get(trans_id, []))
1248
for descendant in list(descendants):
1249
descendants.update(self._limbo_descendants(descendant))
1252
def create_file(self, contents, trans_id, mode_id=None):
1253
"""Schedule creation of a new file.
1257
Contents is an iterator of strings, all of which will be written
1258
to the target destination.
1260
New file takes the permissions of any existing file with that id,
1261
unless mode_id is specified.
1263
name = self._limbo_name(trans_id)
1264
f = open(name, 'wb')
1267
unique_add(self._new_contents, trans_id, 'file')
1269
# Clean up the file, it never got registered so
1270
# TreeTransform.finalize() won't clean it up.
1275
f.writelines(contents)
1278
self._set_mtime(name)
1279
self._set_mode(trans_id, mode_id, S_ISREG)
1281
def _read_file_chunks(self, trans_id):
1282
cur_file = open(self._limbo_name(trans_id), 'rb')
1284
return cur_file.readlines()
1288
def _read_symlink_target(self, trans_id):
1289
return os.readlink(self._limbo_name(trans_id))
1291
def _set_mtime(self, path):
1292
"""All files that are created get the same mtime.
1294
This time is set by the first object to be created.
1296
if self._creation_mtime is None:
1297
self._creation_mtime = time.time()
1298
os.utime(path, (self._creation_mtime, self._creation_mtime))
1300
def create_hardlink(self, path, trans_id):
1301
"""Schedule creation of a hard link"""
1302
name = self._limbo_name(trans_id)
1306
if e.errno != errno.EPERM:
1308
raise errors.HardLinkNotSupported(path)
1310
unique_add(self._new_contents, trans_id, 'file')
1312
# Clean up the file, it never got registered so
1313
# TreeTransform.finalize() won't clean it up.
1317
def create_directory(self, trans_id):
1318
"""Schedule creation of a new directory.
1320
See also new_directory.
1322
os.mkdir(self._limbo_name(trans_id))
1323
unique_add(self._new_contents, trans_id, 'directory')
1325
def create_symlink(self, target, trans_id):
1326
"""Schedule creation of a new symbolic link.
1328
target is a bytestring.
1329
See also new_symlink.
1332
os.symlink(target, self._limbo_name(trans_id))
1333
unique_add(self._new_contents, trans_id, 'symlink')
1336
path = FinalPaths(self).get_path(trans_id)
1339
raise UnableCreateSymlink(path=path)
1341
def cancel_creation(self, trans_id):
1342
"""Cancel the creation of new file contents."""
1343
del self._new_contents[trans_id]
1344
children = self._limbo_children.get(trans_id)
1345
# if this is a limbo directory with children, move them before removing
1347
if children is not None:
1348
self._rename_in_limbo(children)
1349
del self._limbo_children[trans_id]
1350
del self._limbo_children_names[trans_id]
1351
delete_any(self._limbo_name(trans_id))
1353
    def new_orphan(self, trans_id, parent_id):
        # FIXME: There is no tree config, so we use the branch one (it's weird
        # to define it this way as orphaning can only occur in a working tree,
        # but that's all we have (for now). It will find the option in
        # locations.conf or bazaar.conf though) -- vila 20100916
        conf = self._tree.branch.get_config()
        conf_var_name = 'bzr.transform.orphan_policy'
        orphan_policy = conf.get_user_option(conf_var_name)
        default_policy = orphaning_registry.default_key
        if orphan_policy is None:
            orphan_policy = default_policy
        if orphan_policy not in orphaning_registry:
            trace.warning('%s (from %s) is not a known policy, defaulting to %s'
                          % (orphan_policy, conf_var_name, default_policy))
            orphan_policy = default_policy
        handle_orphan = orphaning_registry.get(orphan_policy)
        handle_orphan(self, trans_id, parent_id)
1372
class OrphaningError(errors.BzrError):

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent


class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1393
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1419
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create an orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1427
orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
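# Illustrative sketch (not part of the original module): the active policy is
# read from the branch configuration in DiskTreeTransform.new_orphan, so a
# user who wants unversioned children of deleted directories swept aside
# could set, e.g. in bazaar.conf or locations.conf:
#
#     bzr.transform.orphan_policy = move
#
# Programmatically, resolving and invoking a policy amounts to:
#
#     handler = orphaning_registry.get('move')
#     handler(tt, orphan_trans_id, parent_trans_id)   # hypothetical ids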
1437
class TreeTransform(DiskTreeTransform):
1438
"""Represent a tree transformation.
1440
This object is designed to support incremental generation of the transform,
1443
However, it gives optimum performance when parent directories are created
1444
before their contents. The transform is then able to put child files
1445
directly in their parent directory, avoiding later renames.
1447
It is easy to produce malformed transforms, but they are generally
1448
harmless. Attempting to apply a malformed transform will cause an
1449
exception to be raised before any modifications are made to the tree.
1451
Many kinds of malformed transforms can be corrected with the
1452
resolve_conflicts function. The remaining ones indicate programming error,
1453
such as trying to create a file with no path.
1455
Two sets of file creation methods are supplied. Convenience methods are:
1460
These are composed of the low-level methods:
1462
* create_file or create_directory or create_symlink
1466
Transform/Transaction ids
1467
-------------------------
1468
trans_ids are temporary ids assigned to all files involved in a transform.
1469
It's possible, even common, that not all files in the Tree have trans_ids.
1471
trans_ids are used because filenames and file_ids are not good enough
1472
identifiers; filenames change, and not all files have file_ids. File-ids
1473
are also associated with trans-ids, so that moving a file moves its
1476
trans_ids are only valid for the TreeTransform that generated them.
1480
Limbo is a temporary directory use to hold new versions of files.
1481
Files are added to limbo by create_file, create_directory, create_symlink,
1482
and their convenience variants (new_*). Files may be removed from limbo
1483
using cancel_creation. Files are renamed from limbo into their final
1484
location as part of TreeTransform.apply
1486
Limbo must be cleaned up, by either calling TreeTransform.apply or
1487
calling TreeTransform.finalize.
1489
Files are placed into limbo inside their parent directories, where
1490
possible. This reduces subsequent renames, and makes operations involving
1491
lots of files faster. This optimization is only possible if the parent
1492
directory is created *before* creating any of its children, so avoid
1493
creating children before parents, where possible.
1497
This temporary directory is used by _FileMover for storing files that are
1498
about to be deleted. In case of rollback, the files will be restored.
1499
FileMover does not delete files until it is sure that a rollback will not
1502
def __init__(self, tree, pb=None):
1503
"""Note: a tree_write lock is taken on the tree.
1505
Use TreeTransform.finalize() to release the lock (can be omitted if
1506
TreeTransform.apply() called).
1508
tree.lock_tree_write()
1511
limbodir = urlutils.local_path_from_url(
1512
tree._transport.abspath('limbo'))
1516
if e.errno == errno.EEXIST:
1517
raise ExistingLimbo(limbodir)
1518
deletiondir = urlutils.local_path_from_url(
1519
tree._transport.abspath('pending-deletion'))
1521
os.mkdir(deletiondir)
1523
if e.errno == errno.EEXIST:
1524
raise errors.ExistingPendingDeletion(deletiondir)
1529
# Cache of realpath results, to speed up canonical_path
1530
self._realpaths = {}
1531
# Cache of relpath results, to speed up canonical_path
1533
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1534
tree.case_sensitive)
1535
self._deletiondir = deletiondir
1537
def canonical_path(self, path):
1538
"""Get the canonical tree-relative path"""
1539
# don't follow final symlinks
1540
abs = self._tree.abspath(path)
1541
if abs in self._relpaths:
1542
return self._relpaths[abs]
1543
dirname, basename = os.path.split(abs)
1544
if dirname not in self._realpaths:
1545
self._realpaths[dirname] = os.path.realpath(dirname)
1546
dirname = self._realpaths[dirname]
1547
abs = pathjoin(dirname, basename)
1548
if dirname in self._relpaths:
1549
relpath = pathjoin(self._relpaths[dirname], basename)
1550
relpath = relpath.rstrip('/\\')
1552
relpath = self._tree.relpath(abs)
1553
self._relpaths[abs] = relpath
1556
def tree_kind(self, trans_id):
1557
"""Determine the file kind in the working tree.
1559
:returns: The file kind or None if the file does not exist
1561
path = self._tree_id_paths.get(trans_id)
1565
return file_kind(self._tree.abspath(path))
1566
except errors.NoSuchFile:
1569
def _set_mode(self, trans_id, mode_id, typefunc):
1570
"""Set the mode of new file contents.
1571
The mode_id is the existing file to get the mode from (often the same
1572
as trans_id). The operation is only performed if there's a mode match
1573
according to typefunc.
1578
old_path = self._tree_id_paths[mode_id]
1582
mode = os.stat(self._tree.abspath(old_path)).st_mode
1584
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1585
# Either old_path doesn't exist, or the parent of the
1586
# target is not a directory (but will be one eventually)
1587
# Either way, we know it doesn't exist *right now*
1588
# See also bug #248448
1593
os.chmod(self._limbo_name(trans_id), mode)
1595
def iter_tree_children(self, parent_id):
1596
"""Iterate through the entry's tree children, if any"""
1598
path = self._tree_id_paths[parent_id]
1602
children = os.listdir(self._tree.abspath(path))
1604
if not (osutils._is_error_enotdir(e)
1605
or e.errno in (errno.ENOENT, errno.ESRCH)):
1609
for child in children:
1610
childpath = joinpath(path, child)
1611
if self._tree.is_control_filename(childpath):
1613
yield self.trans_id_tree_path(childpath)
1615
def _generate_limbo_path(self, trans_id):
1616
"""Generate a limbo path using the final path if possible.
1618
This optimizes the performance of applying the tree transform by
1619
avoiding renames. These renames can be avoided only when the parent
1620
directory is already scheduled for creation.
1622
If the final path cannot be used, falls back to using the trans_id as
1625
parent = self._new_parent.get(trans_id)
1626
# if the parent directory is already in limbo (e.g. when building a
1627
# tree), choose a limbo name inside the parent, to reduce further
1629
use_direct_path = False
1630
if self._new_contents.get(parent) == 'directory':
1631
filename = self._new_name.get(trans_id)
1632
if filename is not None:
1633
if parent not in self._limbo_children:
1634
self._limbo_children[parent] = set()
1635
self._limbo_children_names[parent] = {}
1636
use_direct_path = True
1637
# the direct path can only be used if no other file has
1638
# already taken this pathname, i.e. if the name is unused, or
1639
# if it is already associated with this trans_id.
1640
elif self._case_sensitive_target:
1641
if (self._limbo_children_names[parent].get(filename)
1642
in (trans_id, None)):
1643
use_direct_path = True
1645
for l_filename, l_trans_id in\
1646
self._limbo_children_names[parent].iteritems():
1647
if l_trans_id == trans_id:
1649
if l_filename.lower() == filename.lower():
1652
use_direct_path = True
1654
if not use_direct_path:
1655
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1657
limbo_name = pathjoin(self._limbo_files[parent], filename)
1658
self._limbo_children[parent].add(trans_id)
1659
self._limbo_children_names[parent][filename] = trans_id
1663
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1664
"""Apply all changes to the inventory and filesystem.
1666
If filesystem or inventory conflicts are present, MalformedTransform
1669
If apply succeeds, finalize is not necessary.
1671
:param no_conflicts: if True, the caller guarantees there are no
1672
conflicts, so no check is made.
1673
:param precomputed_delta: An inventory delta to use instead of
1675
:param _mover: Supply an alternate FileMover, for testing
1677
if not no_conflicts:
1678
self._check_malformed()
1679
child_pb = ui.ui_factory.nested_progress_bar()
1681
if precomputed_delta is None:
1682
child_pb.update('Apply phase', 0, 2)
1683
inventory_delta = self._generate_inventory_delta()
1686
inventory_delta = precomputed_delta
1689
mover = _FileMover()
1693
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1694
self._apply_removals(mover)
1695
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1696
modified_paths = self._apply_insertions(mover)
1701
mover.apply_deletions()
1704
self._tree.apply_inventory_delta(inventory_delta)
1707
return _TransformResults(modified_paths, self.rename_count)
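        # Illustrative sketch (not part of the original module): the usual
        # lifecycle around apply(), with `wt` a working tree and the file
        # name/id below purely hypothetical:
        #
        #     tt = TreeTransform(wt)
        #     try:
        #         tt.new_file('foo', tt.root, ['contents\n'], 'foo-id')
        #         tt.apply()      # moves limbo content into the tree
        #     finally:
        #         tt.finalize()   # releases the lock, cleans up limbo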
1709
def _generate_inventory_delta(self):
1710
"""Generate an inventory delta for the current transform."""
1711
inventory_delta = []
1712
child_pb = ui.ui_factory.nested_progress_bar()
1713
new_paths = self._inventory_altered()
1714
total_entries = len(new_paths) + len(self._removed_id)
1716
for num, trans_id in enumerate(self._removed_id):
1718
child_pb.update('removing file', num, total_entries)
1719
if trans_id == self._new_root:
1720
file_id = self._tree.get_root_id()
1722
file_id = self.tree_file_id(trans_id)
1723
# File-id isn't really being deleted, just moved
1724
if file_id in self._r_new_id:
1726
path = self._tree_id_paths[trans_id]
1727
inventory_delta.append((path, None, file_id, None))
1728
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1730
entries = self._tree.iter_entries_by_dir(
1731
new_path_file_ids.values())
1732
old_paths = dict((e.file_id, p) for p, e in entries)
1734
for num, (path, trans_id) in enumerate(new_paths):
1736
child_pb.update('adding file',
1737
num + len(self._removed_id), total_entries)
1738
file_id = new_path_file_ids[trans_id]
1742
kind = self.final_kind(trans_id)
1744
kind = self._tree.stored_kind(file_id)
1745
parent_trans_id = self.final_parent(trans_id)
1746
parent_file_id = new_path_file_ids.get(parent_trans_id)
1747
if parent_file_id is None:
1748
parent_file_id = self.final_file_id(parent_trans_id)
1749
if trans_id in self._new_reference_revision:
1750
new_entry = inventory.TreeReference(
1752
self._new_name[trans_id],
1753
self.final_file_id(self._new_parent[trans_id]),
1754
None, self._new_reference_revision[trans_id])
1756
new_entry = inventory.make_entry(kind,
1757
self.final_name(trans_id),
1758
parent_file_id, file_id)
1759
old_path = old_paths.get(new_entry.file_id)
1760
new_executability = self._new_executability.get(trans_id)
1761
if new_executability is not None:
1762
new_entry.executable = new_executability
1763
inventory_delta.append(
1764
(old_path, path, new_entry.file_id, new_entry))
1767
return inventory_delta
1769
def _apply_removals(self, mover):
1770
"""Perform tree operations that remove directory/inventory names.
1772
That is, delete files that are to be deleted, and put any files that
1773
need renaming into limbo. This must be done in strict child-to-parent
1776
If inventory_delta is None, no inventory delta generation is performed.
1778
tree_paths = list(self._tree_path_ids.iteritems())
1779
tree_paths.sort(reverse=True)
1780
child_pb = ui.ui_factory.nested_progress_bar()
1782
for num, data in enumerate(tree_paths):
1783
path, trans_id = data
1784
child_pb.update('removing file', num, len(tree_paths))
1785
full_path = self._tree.abspath(path)
1786
if trans_id in self._removed_contents:
1787
delete_path = os.path.join(self._deletiondir, trans_id)
1788
mover.pre_delete(full_path, delete_path)
1789
elif (trans_id in self._new_name
1790
or trans_id in self._new_parent):
1792
mover.rename(full_path, self._limbo_name(trans_id))
1793
except errors.TransformRenameFailed, e:
1794
if e.errno != errno.ENOENT:
1797
self.rename_count += 1
1801
def _apply_insertions(self, mover):
1802
"""Perform tree operations that insert directory/inventory names.
1804
That is, create any files that need to be created, and restore from
1805
limbo any files that needed renaming. This must be done in strict
1806
parent-to-child order.
1808
If inventory_delta is None, no inventory delta is calculated, and
1809
no list of modified paths is returned.
1811
new_paths = self.new_paths(filesystem_only=True)
1813
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1815
child_pb = ui.ui_factory.nested_progress_bar()
1817
for num, (path, trans_id) in enumerate(new_paths):
1819
child_pb.update('adding file', num, len(new_paths))
1820
full_path = self._tree.abspath(path)
1821
if trans_id in self._needs_rename:
1823
mover.rename(self._limbo_name(trans_id), full_path)
1824
except errors.TransformRenameFailed, e:
1825
# We may be renaming a dangling inventory id
1826
if e.errno != errno.ENOENT:
1829
self.rename_count += 1
1830
if (trans_id in self._new_contents or
1831
self.path_changed(trans_id)):
1832
if trans_id in self._new_contents:
1833
modified_paths.append(full_path)
1834
if trans_id in self._new_executability:
1835
self._set_executability(path, trans_id)
1838
self._new_contents.clear()
1839
return modified_paths
1842
class TransformPreview(DiskTreeTransform):
1843
"""A TreeTransform for generating preview trees.
1845
Unlike TreeTransform, this version works when the input tree is a
1846
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1847
unversioned files in the input tree.
1850
def __init__(self, tree, pb=None, case_sensitive=True):
1852
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1853
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1855
def canonical_path(self, path):
1858
def tree_kind(self, trans_id):
1859
path = self._tree_id_paths.get(trans_id)
1862
file_id = self._tree.path2id(path)
1864
return self._tree.kind(file_id)
1865
except errors.NoSuchFile:
1868
def _set_mode(self, trans_id, mode_id, typefunc):
1869
"""Set the mode of new file contents.
1870
The mode_id is the existing file to get the mode from (often the same
1871
as trans_id). The operation is only performed if there's a mode match
1872
according to typefunc.
1874
# is it ok to ignore this? probably
1877
def iter_tree_children(self, parent_id):
1878
"""Iterate through the entry's tree children, if any"""
1880
path = self._tree_id_paths[parent_id]
1883
file_id = self.tree_file_id(parent_id)
1886
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1887
children = getattr(entry, 'children', {})
1888
for child in children:
1889
childpath = joinpath(path, child)
1890
yield self.trans_id_tree_path(childpath)
1892
def new_orphan(self, trans_id, parent_id):
1893
raise NotImplementedError(self.new_orphan)
1896
class _PreviewTree(tree.Tree):
1897
"""Partial implementation of Tree to support show_diff_trees"""
1899
def __init__(self, transform):
1900
self._transform = transform
1901
self._final_paths = FinalPaths(transform)
1902
self.__by_parent = None
1903
self._parent_ids = []
1904
self._all_children_cache = {}
1905
self._path2trans_id_cache = {}
1906
self._final_name_cache = {}
1907
self._iter_changes_cache = dict((c[0], c) for c in
1908
self._transform.iter_changes())
1910
def _content_change(self, file_id):
1911
"""Return True if the content of this file changed"""
1912
changes = self._iter_changes_cache.get(file_id)
1913
# changes[2] is true if the file content changed. See
1914
# InterTree.iter_changes.
1915
return (changes is not None and changes[2])
1917
def _get_repository(self):
1918
repo = getattr(self._transform._tree, '_repository', None)
1920
repo = self._transform._tree.branch.repository
1923
def _iter_parent_trees(self):
1924
for revision_id in self.get_parent_ids():
1926
yield self.revision_tree(revision_id)
1927
except errors.NoSuchRevisionInTree:
1928
yield self._get_repository().revision_tree(revision_id)
1930
def _get_file_revision(self, file_id, vf, tree_revision):
1931
parent_keys = [(file_id, self._file_revision(t, file_id)) for t in
1932
self._iter_parent_trees()]
1933
vf.add_lines((file_id, tree_revision), parent_keys,
1934
self.get_file_lines(file_id))
1935
repo = self._get_repository()
1936
base_vf = repo.texts
1937
if base_vf not in vf.fallback_versionedfiles:
1938
vf.fallback_versionedfiles.append(base_vf)
1939
return tree_revision
1941
def _stat_limbo_file(self, file_id):
1942
trans_id = self._transform.trans_id_file_id(file_id)
1943
name = self._transform._limbo_name(trans_id)
1944
return os.lstat(name)
1947
def _by_parent(self):
1948
if self.__by_parent is None:
1949
self.__by_parent = self._transform.by_parent()
1950
return self.__by_parent
1952
def _comparison_data(self, entry, path):
1953
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
1954
if kind == 'missing':
1958
file_id = self._transform.final_file_id(self._path2trans_id(path))
1959
executable = self.is_executable(file_id, path)
1960
return kind, executable, None
1962
def is_locked(self):
1965
def lock_read(self):
1966
# Perhaps in theory, this should lock the TreeTransform?
1973
def inventory(self):
1974
"""This Tree does not use inventory as its backing data."""
1975
raise NotImplementedError(_PreviewTree.inventory)
1977
def get_root_id(self):
1978
return self._transform.final_file_id(self._transform.root)
1980
def all_file_ids(self):
1981
tree_ids = set(self._transform._tree.all_file_ids())
1982
tree_ids.difference_update(self._transform.tree_file_id(t)
1983
for t in self._transform._removed_id)
1984
tree_ids.update(self._transform._new_id.values())
1988
return iter(self.all_file_ids())
1990
def _has_id(self, file_id, fallback_check):
1991
if file_id in self._transform._r_new_id:
1993
elif file_id in set([self._transform.tree_file_id(trans_id) for
1994
trans_id in self._transform._removed_id]):
1997
return fallback_check(file_id)
1999
def has_id(self, file_id):
2000
return self._has_id(file_id, self._transform._tree.has_id)
2002
def has_or_had_id(self, file_id):
2003
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2005
def _path2trans_id(self, path):
2006
# We must not use None here, because that is a valid value to store.
2007
trans_id = self._path2trans_id_cache.get(path, object)
2008
if trans_id is not object:
2010
segments = splitpath(path)
2011
cur_parent = self._transform.root
2012
for cur_segment in segments:
2013
for child in self._all_children(cur_parent):
2014
final_name = self._final_name_cache.get(child)
2015
if final_name is None:
2016
final_name = self._transform.final_name(child)
2017
self._final_name_cache[child] = final_name
2018
if final_name == cur_segment:
2022
self._path2trans_id_cache[path] = None
2024
self._path2trans_id_cache[path] = cur_parent
2027
def path2id(self, path):
2028
return self._transform.final_file_id(self._path2trans_id(path))
2030
def id2path(self, file_id):
2031
trans_id = self._transform.trans_id_file_id(file_id)
2033
return self._final_paths._determine_path(trans_id)
2035
raise errors.NoSuchId(self, file_id)
2037
def _all_children(self, trans_id):
2038
children = self._all_children_cache.get(trans_id)
2039
if children is not None:
2041
children = set(self._transform.iter_tree_children(trans_id))
2042
# children in the _new_parent set are provided by _by_parent.
2043
children.difference_update(self._transform._new_parent.keys())
2044
children.update(self._by_parent.get(trans_id, []))
2045
self._all_children_cache[trans_id] = children
2048
def iter_children(self, file_id):
2049
trans_id = self._transform.trans_id_file_id(file_id)
2050
for child_trans_id in self._all_children(trans_id):
2051
yield self._transform.final_file_id(child_trans_id)
2054
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2055
in self._transform._tree.extras())
2056
possible_extras.update(self._transform._new_contents)
2057
possible_extras.update(self._transform._removed_id)
2058
for trans_id in possible_extras:
2059
if self._transform.final_file_id(trans_id) is None:
2060
yield self._final_paths._determine_path(trans_id)
2062
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2063
yield_parents=False):
2064
for trans_id, parent_file_id in ordered_entries:
2065
file_id = self._transform.final_file_id(trans_id)
2068
if (specific_file_ids is not None
2069
and file_id not in specific_file_ids):
2071
kind = self._transform.final_kind(trans_id)
2073
kind = self._transform._tree.stored_kind(file_id)
2074
new_entry = inventory.make_entry(
2076
self._transform.final_name(trans_id),
2077
parent_file_id, file_id)
2078
yield new_entry, trans_id
2080
def _list_files_by_dir(self):
2081
todo = [ROOT_PARENT]
2083
while len(todo) > 0:
2085
parent_file_id = self._transform.final_file_id(parent)
2086
children = list(self._all_children(parent))
2087
paths = dict(zip(children, self._final_paths.get_paths(children)))
2088
children.sort(key=paths.get)
2089
todo.extend(reversed(children))
2090
for trans_id in children:
2091
ordered_ids.append((trans_id, parent_file_id))
2094
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2095
# This may not be a maximally efficient implementation, but it is
2096
# reasonably straightforward. An implementation that grafts the
2097
# TreeTransform changes onto the tree's iter_entries_by_dir results
2098
# might be more efficient, but requires tricky inferences about stack
2100
ordered_ids = self._list_files_by_dir()
2101
for entry, trans_id in self._make_inv_entries(ordered_ids,
2102
specific_file_ids, yield_parents=yield_parents):
2103
yield unicode(self._final_paths.get_path(trans_id)), entry
2105
def _iter_entries_for_dir(self, dir_path):
2106
"""Return path, entry for items in a directory without recursing down."""
2107
dir_file_id = self.path2id(dir_path)
2109
for file_id in self.iter_children(dir_file_id):
2110
trans_id = self._transform.trans_id_file_id(file_id)
2111
ordered_ids.append((trans_id, file_id))
2112
for entry, trans_id in self._make_inv_entries(ordered_ids):
2113
yield unicode(self._final_paths.get_path(trans_id)), entry
2115
def list_files(self, include_root=False, from_dir=None, recursive=True):
2116
"""See WorkingTree.list_files."""
2117
# XXX This should behave like WorkingTree.list_files, but is really
2118
# more like RevisionTree.list_files.
2122
prefix = from_dir + '/'
2123
entries = self.iter_entries_by_dir()
2124
for path, entry in entries:
2125
if entry.name == '' and not include_root:
2128
if not path.startswith(prefix):
2130
path = path[len(prefix):]
2131
yield path, 'V', entry.kind, entry.file_id, entry
2133
if from_dir is None and include_root is True:
2134
root_entry = inventory.make_entry('directory', '',
2135
ROOT_PARENT, self.get_root_id())
2136
yield '', 'V', 'directory', root_entry.file_id, root_entry
2137
entries = self._iter_entries_for_dir(from_dir or '')
2138
for path, entry in entries:
2139
yield path, 'V', entry.kind, entry.file_id, entry
2141
def kind(self, file_id):
2142
trans_id = self._transform.trans_id_file_id(file_id)
2143
return self._transform.final_kind(trans_id)
2145
def stored_kind(self, file_id):
2146
trans_id = self._transform.trans_id_file_id(file_id)
2148
return self._transform._new_contents[trans_id]
2150
return self._transform._tree.stored_kind(file_id)
2152
def get_file_mtime(self, file_id, path=None):
2153
"""See Tree.get_file_mtime"""
2154
if not self._content_change(file_id):
2155
return self._transform._tree.get_file_mtime(file_id)
2156
return self._stat_limbo_file(file_id).st_mtime
2158
def _file_size(self, entry, stat_value):
2159
return self.get_file_size(entry.file_id)
2161
def get_file_size(self, file_id):
2162
"""See Tree.get_file_size"""
2163
if self.kind(file_id) == 'file':
2164
return self._transform._tree.get_file_size(file_id)
2168
def get_file_sha1(self, file_id, path=None, stat_value=None):
2169
trans_id = self._transform.trans_id_file_id(file_id)
2170
kind = self._transform._new_contents.get(trans_id)
2172
return self._transform._tree.get_file_sha1(file_id)
2174
fileobj = self.get_file(file_id)
2176
return sha_file(fileobj)
2180
def is_executable(self, file_id, path=None):
2183
trans_id = self._transform.trans_id_file_id(file_id)
2185
return self._transform._new_executability[trans_id]
2188
return self._transform._tree.is_executable(file_id, path)
2190
if e.errno == errno.ENOENT:
2193
except errors.NoSuchId:
2196
def path_content_summary(self, path):
2197
trans_id = self._path2trans_id(path)
2198
tt = self._transform
2199
tree_path = tt._tree_id_paths.get(trans_id)
2200
kind = tt._new_contents.get(trans_id)
2202
if tree_path is None or trans_id in tt._removed_contents:
2203
return 'missing', None, None, None
2204
summary = tt._tree.path_content_summary(tree_path)
2205
kind, size, executable, link_or_sha1 = summary
2208
limbo_name = tt._limbo_name(trans_id)
2209
if trans_id in tt._new_reference_revision:
2210
kind = 'tree-reference'
2212
statval = os.lstat(limbo_name)
2213
size = statval.st_size
2214
if not supports_executable():
2217
executable = statval.st_mode & S_IEXEC
2221
if kind == 'symlink':
2222
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2223
executable = tt._new_executability.get(trans_id, executable)
2224
return kind, size, executable, link_or_sha1
2226
def iter_changes(self, from_tree, include_unchanged=False,
2227
specific_files=None, pb=None, extra_trees=None,
2228
require_versioned=True, want_unversioned=False):
2229
"""See InterTree.iter_changes.
2231
This has a fast path that is only used when the from_tree matches
2232
the transform tree, and no fancy options are supplied.
2234
if (from_tree is not self._transform._tree or include_unchanged or
2235
specific_files or want_unversioned):
2236
return tree.InterTree(from_tree, self).iter_changes(
2237
include_unchanged=include_unchanged,
2238
specific_files=specific_files,
2240
extra_trees=extra_trees,
2241
require_versioned=require_versioned,
2242
want_unversioned=want_unversioned)
2243
if want_unversioned:
2244
raise ValueError('want_unversioned is not supported')
2245
return self._transform.iter_changes()
2247
def get_file(self, file_id, path=None):
2248
"""See Tree.get_file"""
2249
if not self._content_change(file_id):
2250
return self._transform._tree.get_file(file_id, path)
2251
trans_id = self._transform.trans_id_file_id(file_id)
2252
name = self._transform._limbo_name(trans_id)
2253
return open(name, 'rb')
2255
def get_file_with_stat(self, file_id, path=None):
2256
return self.get_file(file_id, path), None
2258
def annotate_iter(self, file_id,
2259
default_revision=_mod_revision.CURRENT_REVISION):
2260
changes = self._iter_changes_cache.get(file_id)
2264
changed_content, versioned, kind = (changes[2], changes[3],
2268
get_old = (kind[0] == 'file' and versioned[0])
2270
old_annotation = self._transform._tree.annotate_iter(file_id,
2271
default_revision=default_revision)
2275
return old_annotation
2276
if not changed_content:
2277
return old_annotation
2278
# TODO: This is doing something similar to what WT.annotate_iter is
2279
# doing, however it fails slightly because it doesn't know what
2280
# the *other* revision_id is, so it doesn't know how to give the
2281
# other as the origin for some lines, they all get
2282
# 'default_revision'
2283
# It would be nice to be able to use the new Annotator based
2284
# approach, as well.
2285
return annotate.reannotate([old_annotation],
2286
self.get_file(file_id).readlines(),
2289
def get_symlink_target(self, file_id):
2290
"""See Tree.get_symlink_target"""
2291
if not self._content_change(file_id):
2292
return self._transform._tree.get_symlink_target(file_id)
2293
trans_id = self._transform.trans_id_file_id(file_id)
2294
name = self._transform._limbo_name(trans_id)
2295
return osutils.readlink(name)
2297
def walkdirs(self, prefix=''):
2298
pending = [self._transform.root]
2299
while len(pending) > 0:
2300
parent_id = pending.pop()
2303
prefix = prefix.rstrip('/')
2304
parent_path = self._final_paths.get_path(parent_id)
2305
parent_file_id = self._transform.final_file_id(parent_id)
2306
for child_id in self._all_children(parent_id):
2307
path_from_root = self._final_paths.get_path(child_id)
2308
basename = self._transform.final_name(child_id)
2309
file_id = self._transform.final_file_id(child_id)
2310
kind = self._transform.final_kind(child_id)
2311
if kind is not None:
2312
versioned_kind = kind
2315
versioned_kind = self._transform._tree.stored_kind(file_id)
2316
if versioned_kind == 'directory':
2317
subdirs.append(child_id)
2318
children.append((path_from_root, basename, kind, None,
2319
file_id, versioned_kind))
2321
if parent_path.startswith(prefix):
2322
yield (parent_path, parent_file_id), children
2323
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2326
def get_parent_ids(self):
2327
return self._parent_ids
2329
def set_parent_ids(self, parent_ids):
2330
self._parent_ids = parent_ids
2332
def get_revision_tree(self, revision_id):
2333
return self._transform._tree.get_revision_tree(revision_id)


def joinpath(parent, child):
    """Join tree-relative paths, handling the tree root specially"""
    if parent is None or parent == "":
        return child
    else:
        return pathjoin(parent, child)
self._known_paths[trans_id] = self._determine_path(trans_id)
return self._known_paths[trans_id]
def get_paths(self, trans_ids):
return [(self.get_path(t), t) for t in trans_ids]


def topology_sorted_ids(tree):
    """Determine the topological order of the ids in a tree"""
    file_ids = list(tree)
    file_ids.sort(key=tree.id2path)
    return file_ids


def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
               delta_from_tree=False):
    """Create working tree for a branch, using a TreeTransform.
2387
This function should be used on empty trees, having a tree root at most.
2388
(see merge and revert functionality for working with existing trees)
2390
Existing files are handled like so:
2392
- Existing bzrdirs take precedence over creating new items. They are
2393
created as '%s.diverted' % name.
2394
- Otherwise, if the content on disk matches the content we are building,
2395
it is silently replaced.
2396
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2398
:param tree: The tree to convert wt into a copy of
2399
:param wt: The working tree that files will be placed into
2400
:param accelerator_tree: A tree which can be used for retrieving file
2401
contents more quickly than tree itself, i.e. a workingtree. tree
2402
will be used for cases where accelerator_tree's content is different.
2403
:param hardlink: If true, hard-link files to accelerator_tree, where
2404
possible. accelerator_tree must implement abspath, i.e. be a
2406
:param delta_from_tree: If true, build_tree may use the input Tree to
2407
generate the inventory delta.
2409
wt.lock_tree_write()
2413
if accelerator_tree is not None:
2414
accelerator_tree.lock_read()
2416
return _build_tree(tree, wt, accelerator_tree, hardlink,
2419
if accelerator_tree is not None:
2420
accelerator_tree.unlock()
2427
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2428
"""See build_tree."""
2429
for num, _unused in enumerate(wt.all_file_ids()):
2430
if num > 0: # more than just a root
2431
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
    file_trans_id = {}
    top_pb = ui.ui_factory.nested_progress_bar()
    pp = ProgressPhase("Build phase", 2, top_pb)
2435
if tree.inventory.root is not None:
2436
# This is kind of a hack: we should be altering the root
2437
# as part of the regular tree shape diff logic.
2438
# The conditional test here is to avoid doing an
2439
# expensive operation (flush) every time the root id
2440
# is set within the tree, nor setting the root and thus
2441
# marking the tree as dirty, because we use two different
2442
# idioms here: tree interfaces and inventory interfaces.
2443
if wt.get_root_id() != tree.get_root_id():
2444
wt.set_root_id(tree.get_root_id())
910
2446
tt = TreeTransform(wt)
        file_trans_id[wt.get_root_id()] = \
            tt.trans_id_tree_file_id(wt.get_root_id())
        pb = ui.ui_factory.nested_progress_bar()
2454
deferred_contents = []
2456
total = len(tree.inventory)
2458
precomputed_delta = []
2460
precomputed_delta = None
2461
# Check if tree inventory has content. If so, we populate
2462
# existing_files with the directory content. If there are no
2463
# entries we skip populating existing_files as its not used.
2464
# This improves performance and unncessary work on large
2465
# directory trees. (#501307)
2467
existing_files = set()
2468
for dir, files in wt.walkdirs():
2469
existing_files.update(f[0] for f in files)
2470
for num, (tree_path, entry) in \
2471
enumerate(tree.inventory.iter_entries_by_dir()):
2472
pb.update("Building tree", num - len(deferred_contents), total)
920
2473
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2476
file_id = entry.file_id
2478
precomputed_delta.append((None, tree_path, file_id, entry))
2479
if tree_path in existing_files:
2480
target_path = wt.abspath(tree_path)
2481
kind = file_kind(target_path)
2482
if kind == "directory":
2484
bzrdir.BzrDir.open(target_path)
2485
except errors.NotBranchError:
2489
if (file_id not in divert and
2490
_content_match(tree, entry, file_id, kind,
2492
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2493
if kind == 'directory':
924
2495
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2496
if entry.kind == 'file':
2497
# We *almost* replicate new_by_entry, so that we can defer
2498
# getting the file text, and get them all at once.
2499
trans_id = tt.create_path(entry.name, parent_id)
2500
file_trans_id[file_id] = trans_id
2501
tt.version_file(file_id, trans_id)
2502
executable = tree.is_executable(file_id, tree_path)
2504
tt.set_executability(executable, trans_id)
2505
trans_data = (trans_id, tree_path)
2506
deferred_contents.append((file_id, trans_data))
2508
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2511
new_trans_id = file_trans_id[file_id]
2512
old_parent = tt.trans_id_tree_path(tree_path)
2513
_reparent_children(tt, old_parent, new_trans_id)
2514
offset = num + 1 - len(deferred_contents)
2515
_create_files(tt, tree, deferred_contents, pb, offset,
2516
accelerator_tree, hardlink)
2520
divert_trans = set(file_trans_id[f] for f in divert)
2521
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2522
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2523
if len(raw_conflicts) > 0:
2524
precomputed_delta = None
2525
conflicts = cook_conflicts(raw_conflicts, tt)
2526
for conflict in conflicts:
2529
wt.add_conflicts(conflicts)
2530
except errors.UnsupportedOperation:
2532
result = tt.apply(no_conflicts=True,
2533
precomputed_delta=precomputed_delta)
933
2536
top_pb.finished()
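# Illustrative sketch (not part of the original module): build_tree() expects
# an (at most root-only) working tree and materializes `tree` into it, e.g.
# during checkout. With `source_tree` a revision tree and `wt` a freshly
# created, still-empty working tree (both names hypothetical here):
#
#     build_tree(source_tree, wt, accelerator_tree=None, hardlink=False)
#
# Supplying an accelerator_tree (typically another local working tree with
# mostly identical content) lets _create_files() hardlink or copy texts
# locally instead of extracting each one from the repository.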


def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
                  hardlink):
    total = len(desired_files) + offset
    wt = tt._tree
    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(f, p) for (f, p) in unchanged
                         if not accelerator_tree.iter_search_rules([p]).next()]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for file_id, (trans_id, tree_path) in desired_files:
            accelerator_path = unchanged.get(file_id)
            if accelerator_path is None:
                new_desired_files.append((file_id, (trans_id, tree_path)))
                continue
            pb.update('Adding file contents', count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)
            else:
                contents = accelerator_tree.get_file(file_id, accelerator_path)
                if wt.supports_content_filtering():
                    filters = wt._content_filter_stack(tree_path)
                    contents = filtered_output_bytes(contents, filters,
                        ContentFilterContext(tree_path, tree))
                try:
                    tt.create_file(contents, trans_id)
                finally:
                    try:
                        contents.close()
                    except AttributeError:
                        # after filtering, contents may no longer be file-like
                        pass
            count += 1
        offset += count
    for count, ((trans_id, tree_path), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id)
        pb.update('Adding file contents', count + offset, total)
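

# _create_files is also where the accelerator-tree optimisation and hardlink
# checkouts take effect: files that are unchanged relative to the accelerator
# tree are copied (or hardlinked) from local disk instead of being re-read
# from the repository.  An illustrative sketch only -- the helper name and
# paths are assumptions:
def _example_accelerated_checkout(existing_checkout, new_checkout):
    """Build a second checkout quickly by borrowing content from a first."""
    from bzrlib import workingtree
    from bzrlib.bzrdir import BzrDir
    accel = workingtree.WorkingTree.open(existing_checkout)
    wt = BzrDir.create_standalone_workingtree(new_checkout)
    wt.branch.pull(accel.branch)
    build_tree(wt.branch.basis_tree(), wt,
               accelerator_tree=accel, hardlink=True)
    return wt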


def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]
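

# Both helpers move every child of old_parent under new_parent;
# _reparent_children walks the children that exist in the underlying tree,
# while _reparent_transform_children walks the children as the transform will
# finally arrange them.  A minimal sketch of the first one, assuming a working
# tree with an existing directory 'olddir' (names and the file id are
# illustrative only):
def _example_fold_directory(wt):
    tt = TreeTransform(wt)
    try:
        old_parent = tt.trans_id_tree_path('olddir')
        new_parent = tt.new_directory('newdir', tt.root, 'newdir-file-id')
        _reparent_children(tt, old_parent, new_parent)
        tt.apply()
    finally:
        tt.finalize()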
2603
def _content_match(tree, entry, file_id, kind, target_path):
2604
if entry.kind != kind:
2606
if entry.kind == "directory":
2608
if entry.kind == "file":
2609
f = file(target_path, 'rb')
2611
if tree.get_file_text(file_id) == f.read():
2615
elif entry.kind == "symlink":
2616
if tree.get_symlink_target(file_id) == os.readlink(target_path):


def resolve_checkout(tt, conflicts, divert):
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]

        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file)+'.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file)+'.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts
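

# resolve_checkout is meant to be plugged into resolve_conflicts() as the
# pass_func, so that 'duplicate' conflicts against files already on disk are
# renamed to '.moved' (or '.diverted' for nested control directories) rather
# than aborting the checkout.  A minimal sketch of that wiring, assuming an
# in-progress TreeTransform `tt` and a set of diverted trans-ids
# (illustrative only):
def _example_resolve_checkout(tt, divert_trans):
    return resolve_conflicts(
        tt, pass_func=lambda t, c: resolve_checkout(t, c, divert_trans))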


def new_by_entry(tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry"""


def _entry_changes(file_id, entry, working_tree):
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except OSError, e:
        if e.errno != errno.ENOENT:
            raise
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod
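

# detect_changes() compares two inventory entries of the same kind and
# returns a (text_modified, meta_modified) pair; the helper above wraps it
# with an on-disk kind check so that kind changes short-circuit to "contents
# modified".  A sketch of using it directly, assuming file_id is versioned
# with the same kind in both trees (illustrative only):
def _example_detect_changes(working_tree, target_tree, file_id):
    cur_entry = working_tree.inventory[file_id]
    cur_entry._read_tree_state(working_tree.id2path(file_id), working_tree)
    target_entry = target_tree.inventory[file_id]
    text_mod, meta_mod = target_entry.detect_changes(cur_entry)
    cur_entry._forget_tree_state()
    return text_mod, meta_mod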


def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    tt = TreeTransform(working_tree, pb)
    try:
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, backups, pp)
        if change_reporter:
            change_reporter = delta._ChangeReporter(
                unversioned_filter=working_tree.is_ignored)
            delta.report_changes(tt.iter_changes(), change_reporter)
        for conflict in conflicts:
            warning(conflict)
        pp.next_phase()
        tt.apply()
        working_tree.set_merge_modified(merge_modified)
    finally:
        target_tree.unlock()
        tt.finalize()
        pb.clear()
    return conflicts
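

# revert() is the function behind `bzr revert`: it plans every change in a
# TreeTransform, optionally reports it, and then applies it as one atomic
# rename-based operation.  A minimal usage sketch, assuming a working tree at
# `path` (helper name and path are illustrative only):
def _example_revert_to_basis(path):
    from bzrlib import workingtree
    wt = workingtree.WorkingTree.open(path)
    wt.lock_write()
    try:
        # None means "all files"; backups=True keeps locally modified
        # copies around as renamed backup files.
        return revert(wt, wt.basis_tree(), None, backups=True)
    finally:
        wt.unlock()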


def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    child_pb = ui.ui_factory.nested_progress_bar()
    try:
        if merge_modified is None:
            merge_modified = working_tree.merge_modified()
        merge_modified = _alter_files(working_tree, target_tree, tt,
                                      child_pb, filenames, backups,
                                      merge_modified, basis_tree)
    finally:
        child_pb.finished()
    child_pb = ui.ui_factory.nested_progress_bar()
    try:
        raw_conflicts = resolve_conflicts(tt, child_pb,
            lambda t, c: conflict_pass(t, c, target_tree))
    finally:
        child_pb.finished()
    conflicts = cook_conflicts(raw_conflicts, tt)
    return conflicts, merge_modified
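

# Because _prepare_revert_transform only *plans* the revert, it can be used
# for a dry run: build the transform, inspect the planned changes, and then
# throw the transform away without applying it.  A sketch, assuming both
# trees are already locked (helper name illustrative only):
def _example_revert_preview(working_tree, target_tree, filenames=None):
    tt = TreeTransform(working_tree)
    pb = ui.ui_factory.nested_progress_bar()
    try:
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, False, pp)
        return conflicts, list(tt.iter_changes())
    finally:
        tt.finalize()
        pb.finished()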


def _alter_files(working_tree, target_tree, tt, pb, specific_files,
                 backups, merge_modified, basis_tree=None):
    if basis_tree is not None:
        basis_tree.lock_read()
    change_list = target_tree.iter_changes(working_tree,
        specific_files=specific_files, pb=pb)
    if target_tree.get_root_id() is None:
        skip_root = True
    else:
        skip_root = False
    try:
        deferred_files = []
        for id_num, (file_id, path, changed_content, versioned, parent, name,
                kind, executable) in enumerate(change_list):
            if skip_root and file_id[0] is not None and parent[0] is None:
                continue
            trans_id = tt.trans_id_file_id(file_id)
            mode_id = None
            if changed_content:
                keep_content = False
                if kind[0] == 'file' and (backups or kind[1] is None):
                    wt_sha1 = working_tree.get_file_sha1(file_id)
                    if merge_modified.get(file_id) != wt_sha1:
                        # acquire the basis tree lazily to prevent the
                        # expense of accessing it when it's not needed ?
                        # (Guessing, RBC, 200702)
                        if basis_tree is None:
                            basis_tree = working_tree.basis_tree()
                            basis_tree.lock_read()
                        if file_id in basis_tree:
                            if wt_sha1 != basis_tree.get_file_sha1(file_id):
                                keep_content = True
                        elif kind[1] is None and not versioned[1]:
                            keep_content = True
                if kind[0] is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif kind[1] is not None:
                        parent_trans_id = tt.trans_id_file_id(parent[0])
                        backup_name = tt._available_backup_name(
                            name[0], parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(name[0], parent_trans_id)
                        if versioned == (True, True):
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # contents
                        mode_id = trans_id
                        trans_id = new_trans_id
                if kind[1] in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if kind[1] == 'tree-reference':
                        revision = target_tree.get_reference_revision(file_id,
                                                                      path[1])
                        tt.set_tree_reference(revision, trans_id)
                elif kind[1] == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
                                      trans_id)
                elif kind[1] == 'file':
                    deferred_files.append((file_id, (trans_id, mode_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(file_id)
                    if (file_id in basis_tree and new_sha1 ==
                        basis_tree.get_file_sha1(file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1
                    # preserve the execute bit when backing up
                    if keep_content and executable[0] == executable[1]:
                        tt.set_executability(executable[1], trans_id)
                elif kind[1] is not None:
                    raise AssertionError(kind[1])
            if versioned == (False, True):
                tt.version_file(file_id, trans_id)
            if versioned == (True, False):
                tt.unversion_file(trans_id)
            if (name[1] is not None and
                (name[0] != name[1] or parent[0] != parent[1])):
                if name[1] == '' and parent[1] is None:
                    parent_trans = ROOT_PARENT
                else:
                    parent_trans = tt.trans_id_file_id(parent[1])
                if parent[0] is None and versioned[0]:
                    tt.adjust_root_path(name[1], parent_trans)
                else:
                    tt.adjust_path(name[1], parent_trans, trans_id)
            if executable[0] != executable[1] and kind[1] == "file":
                tt.set_executability(executable[1], trans_id)
        if working_tree.supports_content_filtering():
            for index, ((trans_id, mode_id), bytes) in enumerate(
                target_tree.iter_files_bytes(deferred_files)):
                file_id = deferred_files[index][0]
                # We're reverting a tree to the target tree so using the
                # target tree to find the file path seems the best choice
                # here IMO - Ian C 27/Oct/2009
                filter_tree_path = target_tree.id2path(file_id)
                filters = working_tree._content_filter_stack(filter_tree_path)
                bytes = filtered_output_bytes(bytes, filters,
                    ContentFilterContext(filter_tree_path, working_tree))
                tt.create_file(bytes, trans_id, mode_id)
        else:
            for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
                deferred_files):
                tt.create_file(bytes, trans_id, mode_id)
        tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified
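

# The loop in _alter_files consumes iter_changes() tuples of the form
# (file_id, (old_path, new_path), changed_content, versioned, parent, name,
#  kind, executable), where the last five fields are (old, new) pairs.  A
# small sketch that walks the same stream, assuming both trees are locked
# (helper name illustrative only):
def _example_show_planned_changes(target_tree, working_tree):
    for (file_id, paths, changed_content, versioned, parent, name, kind,
         executable) in target_tree.iter_changes(working_tree):
        if changed_content or kind[0] != kind[1]:
            print '%s: %s -> %s (%s -> %s)' % (
                file_id, paths[0], paths[1], kind[0], kind[1])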


def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for n in range(10):
            pb.update('Resolution pass', n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        pb.finished()
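

# resolve_conflicts() repeatedly asks the transform for structural problems
# (missing or unversioned parents, duplicate names, and so on) and lets
# pass_func patch them up, giving up after ten passes.  The usual calling
# pattern is a resolve/cook/apply sequence; a sketch, assuming an in-progress
# TreeTransform `tt` and warnings routed through bzrlib.trace (illustrative
# only):
def _example_finish_transform(tt):
    raw_conflicts = resolve_conflicts(tt)       # default pass_func
    conflicts = cook_conflicts(raw_conflicts, tt)
    for conflict in conflicts:
        warning(conflict)
    tt.apply()
    return conflicts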


def conflict_pass(tt, conflicts, path_tree=None):
    """Resolve some classes of conflicts.

    :param tt: The transform to resolve conflicts in
    :param conflicts: The conflicts to resolve
    :param path_tree: A Tree to get supplemental paths from
    """
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        if c_type == 'duplicate id':