        self.create_symlink(target, trans_id)
        return trans_id

    def new_orphan(self, trans_id, parent_id):
        """Schedule an item to be orphaned.

        When a directory is about to be removed, its children, if they are
        not versioned, are moved out of the way: they don't have a parent
        any more.

        :param trans_id: The trans_id of the existing item.
        :param parent_id: The parent trans_id of the item.
        """
        raise NotImplementedError(self.new_orphan)

    def _get_potential_orphans(self, dir_id):
        """Find the potential orphans in a directory.

        A directory can't be safely deleted if there are versioned files in
        it.  If all the contained files are unversioned then they can be
        orphaned.

        The 'None' return value means that the directory contains at least
        one versioned file and should not be deleted.

        :param dir_id: The directory trans id.

        :return: A list of the orphan trans ids or None if at least one
            versioned file is present.
        """
        orphans = []
        # Find the potential orphans, stop if one item should be kept
        for child_tid in self.by_parent()[dir_id]:
            if child_tid in self._removed_contents:
                # The child is removed as part of the transform. Since it
                # was versioned before, it's not an orphan.
                continue
            elif self.final_file_id(child_tid) is None:
                # The child is not versioned.
                orphans.append(child_tid)
            else:
                # We have a versioned file here, searching for orphans is
                # meaningless.
                orphans = None
                break
        return orphans
    def _affected_ids(self):
        """Return the set of transform ids affected by the transform."""
        trans_ids = set(self._removed_id)
        trans_ids.update(self._new_id)
        trans_ids.update(self._removed_contents)
        trans_ids.update(self._new_contents)
        trans_ids.update(self._new_executability)
        trans_ids.update(self._new_name)
        trans_ids.update(self._new_parent)
        return trans_ids

    def _get_file_id_maps(self):
        """Return mapping of file_ids to trans_ids in the to and from states."""
        trans_ids = self._affected_ids()
        from_trans_ids = {}
        to_trans_ids = {}
        # Build up two dicts: trans_ids associated with file ids in the
        # FROM state, vs the TO state.
        for trans_id in trans_ids:
            from_file_id = self.tree_file_id(trans_id)
            if from_file_id is not None:
                from_trans_ids[from_file_id] = trans_id
            to_file_id = self.final_file_id(trans_id)
            if to_file_id is not None:
                to_trans_ids[to_file_id] = trans_id
        return from_trans_ids, to_trans_ids
    def _from_file_data(self, from_trans_id, from_versioned, from_path):
        """Get data about a file in the from (tree) state.

        Return a (name, parent, kind, executable) tuple.
        """
        from_path = self._tree_id_paths.get(from_trans_id)
        if from_versioned:
            # get data from working tree if versioned
            from_entry = next(self._tree.iter_entries_by_dir(
                specific_files=[from_path]))[1]
            from_name = from_entry.name
            from_parent = from_entry.parent_id
        else:
            from_entry = None
            if from_path is None:
                # File does not exist in FROM state
                from_name = None
                from_parent = None
            else:
                # File exists, but is not versioned. Have to use path-
                # splitting to get the name and parent.
                from_name = os.path.basename(from_path)
                tree_parent = self.get_tree_parent(from_trans_id)
                from_parent = self.tree_file_id(tree_parent)
        if from_path is not None:
            from_kind, from_executable, from_stats = \
                self._tree._comparison_data(from_entry, from_path)
        else:
            from_kind = None
            from_executable = False
        return from_name, from_parent, from_kind, from_executable

    def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
        """Get data about a file in the to (target) state.

        Return a (name, parent, kind, executable) tuple.
        """
        to_name = self.final_name(to_trans_id)
        to_kind = self.final_kind(to_trans_id)
        to_parent = self.final_file_id(self.final_parent(to_trans_id))
        if to_trans_id in self._new_executability:
            to_executable = self._new_executability[to_trans_id]
        elif to_trans_id == from_trans_id:
            to_executable = from_executable
        else:
            to_executable = False
        return to_name, to_parent, to_kind, to_executable
    def iter_changes(self):
        """Produce output in the same format as Tree.iter_changes.

        Will produce nonsensical results if invoked while inventory/filesystem
        conflicts (as reported by TreeTransform.find_conflicts()) are present.

        This reads the Transform, but only reproduces changes involving a
        file_id.  Files that are not versioned in either of the FROM or TO
        states are not reflected.
        """
        final_paths = FinalPaths(self)
        from_trans_ids, to_trans_ids = self._get_file_id_maps()
        results = []
        # Now iterate through all active file_ids
        for file_id in set(from_trans_ids).union(to_trans_ids):
            modified = False
            from_trans_id = from_trans_ids.get(file_id)
            # find file ids, and determine versioning state
            if from_trans_id is None:
                from_versioned = False
                from_trans_id = to_trans_ids[file_id]
            else:
                from_versioned = True
            to_trans_id = to_trans_ids.get(file_id)
            if to_trans_id is None:
                to_versioned = False
                to_trans_id = from_trans_id
            else:
                to_versioned = True

            if not from_versioned:
                from_path = None
            else:
                from_path = self._tree_id_paths.get(from_trans_id)
            if not to_versioned:
                to_path = None
            else:
                to_path = final_paths.get_path(to_trans_id)

            from_name, from_parent, from_kind, from_executable = \
                self._from_file_data(from_trans_id, from_versioned, from_path)

            to_name, to_parent, to_kind, to_executable = \
                self._to_file_data(to_trans_id, from_trans_id, from_executable)

            if from_kind != to_kind:
                modified = True
            elif to_kind in ('file', 'symlink') and (
                    to_trans_id != from_trans_id or
                    to_trans_id in self._new_contents):
                modified = True
            if (not modified and from_versioned == to_versioned and
                    from_parent == to_parent and from_name == to_name and
                    from_executable == to_executable):
                continue
            results.append((file_id, (from_path, to_path), modified,
                            (from_versioned, to_versioned),
                            (from_parent, to_parent),
                            (from_name, to_name),
                            (from_kind, to_kind),
                            (from_executable, to_executable)))

        def path_key(c):
            paths = c[1]
            return (paths[0] or '', paths[1] or '')
        return iter(sorted(results, key=path_key))
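    # Note (illustrative, not part of the original module): each element
    # yielded by iter_changes() above has the shape
    #
    #   (file_id, (from_path, to_path), changed_content,
    #    (from_versioned, to_versioned), (from_parent, to_parent),
    #    (from_name, to_name), (from_kind, to_kind),
    #    (from_executable, to_executable))
    #
    # matching the Tree.iter_changes convention referenced in the docstring.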
    def get_preview_tree(self):
        """Return a tree representing the result of the transform.

        The tree is a snapshot, and altering the TreeTransform will invalidate
        it.
        """
        return _PreviewTree(self)
    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date.  (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id.  (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
        self._check_malformed()
        if strict:
            unversioned = set(self._new_contents).difference(set(self._new_id))
            for trans_id in unversioned:
                if self.final_file_id(trans_id) is None:
                    raise errors.StrictCommitFailed()

        revno, last_rev_id = branch.last_revision_info()
        if last_rev_id == _mod_revision.NULL_REVISION:
            if merge_parents is not None:
                raise ValueError('Cannot supply merge parents for first'
                                 ' commit.')
            parent_ids = []
        else:
            parent_ids = [last_rev_id]
            if merge_parents is not None:
                parent_ids.extend(merge_parents)
        if self._tree.get_revision_id() != last_rev_id:
            raise ValueError('TreeTransform not based on branch basis: %s' %
                             self._tree.get_revision_id().decode('utf-8'))
        revprops = commit.Commit.update_revprops(revprops, branch, authors)
        builder = branch.get_commit_builder(parent_ids,
                                            timestamp=timestamp,
                                            timezone=timezone,
                                            committer=committer,
                                            revprops=revprops,
                                            revision_id=revision_id)
        preview = self.get_preview_tree()
        list(builder.record_iter_changes(preview, last_rev_id,
                                         self.iter_changes()))
        builder.finish_inventory()
        revision_id = builder.commit(message)
        branch.set_last_revision_info(revno + 1, revision_id)
        return revision_id
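    # Usage sketch (illustrative; assumes `tt` is a TreeTransform whose tree
    # is at the branch's basis revision, as the check above requires):
    #
    #   trans_id = tt.new_file('hello.txt', tt.root, [b'hello\n'], b'hello-id')
    #   new_rev_id = tt.commit(branch, 'add hello.txt',
    #                          committer='J Random Hacker <jrandom@example.com>')
    #
    # The returned revision id becomes the new branch tip via the
    # set_last_revision_info() call above.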
    def _text_parent(self, trans_id):
        path = self.tree_path(trans_id)
        try:
            if path is None or self._tree.kind(path) != 'file':
                return None
        except errors.NoSuchFile:
            return None
        return path

    def _get_parents_texts(self, trans_id):
        """Get texts for compression parents of this file."""
        path = self._text_parent(trans_id)
        if path is None:
            return ()
        return (self._tree.get_file_text(path),)

    def _get_parents_lines(self, trans_id):
        """Get lines for compression parents of this file."""
        path = self._text_parent(trans_id)
        if path is None:
            return ()
        return (self._tree.get_file_lines(path),)
    def serialize(self, serializer):
        """Serialize this TreeTransform.

        :param serializer: A Serialiser like pack.ContainerSerializer.
        """
        new_name = {k.encode('utf-8'): v.encode('utf-8')
                    for k, v in viewitems(self._new_name)}
        new_parent = {k.encode('utf-8'): v.encode('utf-8')
                      for k, v in viewitems(self._new_parent)}
        new_id = {k.encode('utf-8'): v
                  for k, v in viewitems(self._new_id)}
        new_executability = {k.encode('utf-8'): int(v)
                             for k, v in viewitems(self._new_executability)}
        tree_path_ids = {k.encode('utf-8'): v.encode('utf-8')
                         for k, v in viewitems(self._tree_path_ids)}
        non_present_ids = {k: v.encode('utf-8')
                           for k, v in viewitems(self._non_present_ids)}
        removed_contents = [trans_id.encode('utf-8')
                            for trans_id in self._removed_contents]
        removed_id = [trans_id.encode('utf-8')
                      for trans_id in self._removed_id]
        attribs = {
            b'_id_number': self._id_number,
            b'_new_name': new_name,
            b'_new_parent': new_parent,
            b'_new_executability': new_executability,
            b'_new_id': new_id,
            b'_tree_path_ids': tree_path_ids,
            b'_removed_id': removed_id,
            b'_removed_contents': removed_contents,
            b'_non_present_ids': non_present_ids,
            }
        yield serializer.bytes_record(bencode.bencode(attribs),
                                      ((b'attribs',),))
        for trans_id, kind in sorted(viewitems(self._new_contents)):
            if kind == 'file':
                with open(self._limbo_name(trans_id), 'rb') as cur_file:
                    lines = cur_file.readlines()
                parents = self._get_parents_lines(trans_id)
                mpdiff = multiparent.MultiParent.from_lines(lines, parents)
                content = b''.join(mpdiff.to_patch())
            if kind == 'directory':
                content = b''
            if kind == 'symlink':
                content = self._read_symlink_target(trans_id)
                if not isinstance(content, bytes):
                    content = content.encode('utf-8')
            yield serializer.bytes_record(
                content, ((trans_id.encode('utf-8'), kind.encode('ascii')),))
    def deserialize(self, records):
        """Deserialize a stored TreeTransform.

        :param records: An iterable of (names, content) tuples, as per
            pack.ContainerPushParser.
        """
        names, content = next(records)
        attribs = bencode.bdecode(content)
        self._id_number = attribs[b'_id_number']
        self._new_name = {k.decode('utf-8'): v.decode('utf-8')
                          for k, v in viewitems(attribs[b'_new_name'])}
        self._new_parent = {k.decode('utf-8'): v.decode('utf-8')
                            for k, v in viewitems(attribs[b'_new_parent'])}
        self._new_executability = {k.decode('utf-8'): bool(v)
                                   for k, v in viewitems(attribs[b'_new_executability'])}
        self._new_id = {k.decode('utf-8'): v
                        for k, v in viewitems(attribs[b'_new_id'])}
        self._r_new_id = {v: k for k, v in viewitems(self._new_id)}
        self._tree_path_ids = {}
        self._tree_id_paths = {}
        for bytepath, trans_id in viewitems(attribs[b'_tree_path_ids']):
            path = bytepath.decode('utf-8')
            trans_id = trans_id.decode('utf-8')
            self._tree_path_ids[path] = trans_id
            self._tree_id_paths[trans_id] = path
        self._removed_id = {trans_id.decode('utf-8')
                            for trans_id in attribs[b'_removed_id']}
        self._removed_contents = set(trans_id.decode('utf-8')
                                     for trans_id in attribs[b'_removed_contents'])
        self._non_present_ids = {k: v.decode('utf-8')
                                 for k, v in viewitems(attribs[b'_non_present_ids'])}
        for ((trans_id, kind),), content in records:
            trans_id = trans_id.decode('utf-8')
            kind = kind.decode('ascii')
            if kind == 'file':
                mpdiff = multiparent.MultiParent.from_patch(content)
                lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
                self.create_file(lines, trans_id)
            if kind == 'directory':
                self.create_directory(trans_id)
            if kind == 'symlink':
                self.create_symlink(content.decode('utf-8'), trans_id)
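    # Round-trip sketch (illustrative; the serializer/parser names are the
    # ones mentioned in the docstrings above, not defined here):
    #
    #   records = list(tt.serialize(serializer))  # e.g. a ContainerSerializer
    #   ...persist the records...
    #   tt2.deserialize(iter(parsed_records))     # e.g. from ContainerPushParser
    #
    # serialize() emits one bencoded 'attribs' record followed by one record
    # per new file/directory/symlink, and deserialize() consumes them in the
    # same order.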
class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
        self._limbodir = limbodir
        self._deletiondir = None
        # A mapping of transform ids to their limbo filename
        self._limbo_files = {}
        self._possibly_stale_limbo_files = set()
        # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
        self._limbo_children = {}
        # Map transform ids to maps of child filename to child transform id
        self._limbo_children_names = {}
        # List of transform ids that need to be renamed from limbo into place
        self._needs_rename = set()
        self._creation_mtime = None

    def finalize(self):
        """Release the working tree lock, if held, clean up limbo dir.

        This is required if apply has not been invoked, but can be invoked
        multiple times.
        """
        if self._tree is None:
            return
        try:
            limbo_paths = list(viewvalues(self._limbo_files))
            limbo_paths.extend(self._possibly_stale_limbo_files)
            limbo_paths.sort(reverse=True)
            for path in limbo_paths:
                try:
                    delete_any(path)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
                    # XXX: warn? perhaps we just got interrupted at an
                    # inconvenient moment, but perhaps files are disappearing
                    # from the tree too?
            try:
                delete_any(self._limbodir)
            except OSError:
                # We don't especially care *why* the dir is immortal.
                raise ImmortalLimbo(self._limbodir)
            try:
                if self._deletiondir is not None:
                    delete_any(self._deletiondir)
            except OSError:
                raise errors.ImmortalPendingDeletion(self._deletiondir)
        finally:
            TreeTransformBase.finalize(self)
    def _limbo_supports_executable(self):
        """Check if the limbo path supports the executable bit."""
        # FIXME: Check actual file system capabilities of limbodir
        return osutils.supports_executable()

    def _limbo_name(self, trans_id):
        """Generate the limbo name of a file."""
        limbo_name = self._limbo_files.get(trans_id)
        if limbo_name is None:
            limbo_name = self._generate_limbo_path(trans_id)
            self._limbo_files[trans_id] = limbo_name
        return limbo_name

    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the trans_id as the relative path.

        This is suitable as a fallback, and when the transform should not be
        sensitive to the path encoding of the limbo directory.
        """
        self._needs_rename.add(trans_id)
        return pathjoin(self._limbodir, trans_id)

    def adjust_path(self, name, parent, trans_id):
        previous_parent = self._new_parent.get(trans_id)
        previous_name = self._new_name.get(trans_id)
        TreeTransformBase.adjust_path(self, name, parent, trans_id)
        if (trans_id in self._limbo_files and
                trans_id not in self._needs_rename):
            self._rename_in_limbo([trans_id])
            if previous_parent != parent:
                self._limbo_children[previous_parent].remove(trans_id)
            if previous_parent != parent or previous_name != name:
                del self._limbo_children_names[previous_parent][previous_name]

    def _rename_in_limbo(self, trans_ids):
        """Fix limbo names so that the right final path is produced.

        This means we outsmarted ourselves-- we tried to avoid renaming
        these files later by creating them with their final names in their
        final parents.  But now the previous name or parent is no longer
        suitable, so we have to rename them.

        Even for trans_ids that have no new contents, we must remove their
        entries from _limbo_files, because they are now stale.
        """
        for trans_id in trans_ids:
            old_path = self._limbo_files[trans_id]
            self._possibly_stale_limbo_files.add(old_path)
            del self._limbo_files[trans_id]
            if trans_id not in self._new_contents:
                continue
            new_path = self._limbo_name(trans_id)
            os.rename(old_path, new_path)
            self._possibly_stale_limbo_files.remove(old_path)
            for descendant in self._limbo_descendants(trans_id):
                desc_path = self._limbo_files[descendant]
                desc_path = new_path + desc_path[len(old_path):]
                self._limbo_files[descendant] = desc_path
    def _limbo_descendants(self, trans_id):
        """Return the set of trans_ids whose limbo paths descend from this."""
        descendants = set(self._limbo_children.get(trans_id, []))
        for descendant in list(descendants):
            descendants.update(self._limbo_descendants(descendant))
        return descendants

    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
        """Schedule creation of a new file.

        See also new_file.

        :param contents: an iterator of strings, all of which will be written
            to the target destination.
        :param trans_id: TreeTransform handle
        :param mode_id: If not None, force the mode of the target file to
            match the mode of the object referenced by mode_id.
            Otherwise, we will try to preserve mode bits of an existing file.
        :param sha1: If the sha1 of this content is already known, pass it in.
            We can use it to prevent future sha1 computations.
        """
        name = self._limbo_name(trans_id)
        with open(name, 'wb') as f:
            unique_add(self._new_contents, trans_id, 'file')
            f.writelines(contents)
        self._set_mtime(name)
        self._set_mode(trans_id, mode_id, S_ISREG)
        # It is unfortunate we have to use lstat instead of fstat, but we just
        # used utime and chmod on the file, so we need the accurate final
        # info.
        if sha1 is not None:
            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
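    # Example (illustrative): `contents` is an iterable of byte strings, e.g.
    #
    #   tt.create_file([b'line one\n', b'line two\n'], trans_id)
    #
    # passing sha1= as well when the checksum is already known, so a later
    # sha1 computation can be skipped.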
    def _read_symlink_target(self, trans_id):
        return os.readlink(self._limbo_name(trans_id))

    def _set_mtime(self, path):
        """All files that are created get the same mtime.

        This time is set by the first object to be created.
        """
        if self._creation_mtime is None:
            self._creation_mtime = time.time()
        os.utime(path, (self._creation_mtime, self._creation_mtime))

    def create_hardlink(self, path, trans_id):
        """Schedule creation of a hard link."""
        name = self._limbo_name(trans_id)
        try:
            os.link(path, name)
        except OSError as e:
            if e.errno != errno.EPERM:
                raise
            raise errors.HardLinkNotSupported(path)
        try:
            unique_add(self._new_contents, trans_id, 'file')
        except BaseException:
            # Clean up the file, it never got registered so
            # TreeTransform.finalize() won't clean it up.
            os.unlink(name)
            raise

    def create_directory(self, trans_id):
        """Schedule creation of a new directory.

        See also new_directory.
        """
        os.mkdir(self._limbo_name(trans_id))
        unique_add(self._new_contents, trans_id, 'directory')

    def create_symlink(self, target, trans_id):
        """Schedule creation of a new symbolic link.

        target is a bytestring.
        See also new_symlink.
        """
        if has_symlinks():
            os.symlink(target, self._limbo_name(trans_id))
            unique_add(self._new_contents, trans_id, 'symlink')
        else:
            try:
                path = FinalPaths(self).get_path(trans_id)
            except KeyError:
                path = None
            raise UnableCreateSymlink(path=path)

    def cancel_creation(self, trans_id):
        """Cancel the creation of new file contents."""
        del self._new_contents[trans_id]
        if trans_id in self._observed_sha1s:
            del self._observed_sha1s[trans_id]
        children = self._limbo_children.get(trans_id)
        # if this is a limbo directory with children, move them before
        # removing the directory
        if children is not None:
            self._rename_in_limbo(children)
            del self._limbo_children[trans_id]
            del self._limbo_children_names[trans_id]
        delete_any(self._limbo_name(trans_id))
    def new_orphan(self, trans_id, parent_id):
        conf = self._tree.get_config_stack()
        handle_orphan = conf.get('transform.orphan_policy')
        handle_orphan(self, trans_id, parent_id)


class OrphaningError(errors.BzrError):

    # Only bugs could lead to such an exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent


class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy


def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `brz-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'brz-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))


def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create an orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')


orphaning_registry = registry.Registry()
orphaning_registry.register(
    u'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    u'move', move_orphan,
    'Move orphans into the brz-orphans directory.')
orphaning_registry._set_default_key(u'conflict')


opt_transform_orphan = _mod_config.RegistryOption(
    'transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
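# Example (illustrative): DiskTreeTransform.new_orphan above looks the policy
# up through the tree's config stack, so it can be chosen in the user's or
# branch's configuration, e.g.:
#
#   transform.orphan_policy = move
#
# 'conflict' (the registered default) leaves orphans in place and records a
# conflict on the directory instead.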
class TreeTransform(DiskTreeTransform):
    """Represent a tree transformation.

    This object is designed to support incremental generation of the
    transform, in any order.

    However, it gives optimum performance when parent directories are created
    before their contents.  The transform is then able to put child files
    directly in their parent directory, avoiding later renames.

    It is easy to produce malformed transforms, but they are generally
    harmless.  Attempting to apply a malformed transform will cause an
    exception to be raised before any modifications are made to the tree.

    Many kinds of malformed transforms can be corrected with the
    resolve_conflicts function.  The remaining ones indicate programming
    error, such as trying to create a file with no path.

    Two sets of file creation methods are supplied.  Convenience methods are:
     * new_file
     * new_directory
     * new_symlink

    These are composed of the low-level methods:
     * create_path
     * create_file or create_directory or create_symlink
     * version_file
     * set_executability

    Transform/Transaction ids
    -------------------------
    trans_ids are temporary ids assigned to all files involved in a
    transform.  It's possible, even common, that not all files in the Tree
    have trans_ids.

    trans_ids are used because filenames and file_ids are not good enough
    identifiers; filenames change, and not all files have file_ids.  File-ids
    are also associated with trans-ids, so that moving a file moves its
    file-id.

    trans_ids are only valid for the TreeTransform that generated them.

    Limbo
    -----
    Limbo is a temporary directory used to hold new versions of files.
    Files are added to limbo by create_file, create_directory,
    create_symlink, and their convenience variants (new_*).  Files may be
    removed from limbo using cancel_creation.  Files are renamed from limbo
    into their final location as part of TreeTransform.apply.

    Limbo must be cleaned up, by either calling TreeTransform.apply or
    calling TreeTransform.finalize.

    Files are placed into limbo inside their parent directories, where
    possible.  This reduces subsequent renames, and makes operations
    involving lots of files faster.  This optimization is only possible if
    the parent directory is created *before* creating any of its children,
    so avoid creating children before parents, where possible.

    pending-deletion
    ----------------
    This temporary directory is used by _FileMover for storing files that
    are about to be deleted.  In case of rollback, the files will be
    restored.  FileMover does not delete files until it is sure that a
    rollback will not happen.
    """
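    # Usage sketch (illustrative, not from the original docstring): the
    # convenience and low-level styles listed above are equivalent, e.g.
    #
    #   trans_id = tt.new_file('foo', tt.root, [b'contents\n'], b'foo-id')
    #
    # versus
    #
    #   trans_id = tt.create_path('foo', tt.root)
    #   tt.create_file([b'contents\n'], trans_id)
    #   tt.version_file(b'foo-id', trans_id)
    #
    # followed by tt.apply() (or tt.finalize() to abandon the transform).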
def __init__(self, tree, pb=None):
1560
"""Note: a tree_write lock is taken on the tree.
1562
Use TreeTransform.finalize() to release the lock (can be omitted if
1563
TreeTransform.apply() called).
1565
tree.lock_tree_write()
1567
limbodir = urlutils.local_path_from_url(
1568
tree._transport.abspath('limbo'))
1569
osutils.ensure_empty_directory_exists(
1571
errors.ExistingLimbo)
1572
deletiondir = urlutils.local_path_from_url(
1573
tree._transport.abspath('pending-deletion'))
1574
osutils.ensure_empty_directory_exists(
1576
errors.ExistingPendingDeletion)
1581
# Cache of realpath results, to speed up canonical_path
1582
self._realpaths = {}
1583
# Cache of relpath results, to speed up canonical_path
1585
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1586
tree.case_sensitive)
1587
self._deletiondir = deletiondir
1589
def canonical_path(self, path):
1590
"""Get the canonical tree-relative path"""
1591
# don't follow final symlinks
1592
abs = self._tree.abspath(path)
1593
if abs in self._relpaths:
1594
return self._relpaths[abs]
1595
dirname, basename = os.path.split(abs)
1596
if dirname not in self._realpaths:
1597
self._realpaths[dirname] = os.path.realpath(dirname)
1598
dirname = self._realpaths[dirname]
1599
abs = pathjoin(dirname, basename)
1600
if dirname in self._relpaths:
1601
relpath = pathjoin(self._relpaths[dirname], basename)
1602
relpath = relpath.rstrip('/\\')
1604
relpath = self._tree.relpath(abs)
1605
self._relpaths[abs] = relpath
1608
def tree_kind(self, trans_id):
1609
"""Determine the file kind in the working tree.
1611
:returns: The file kind or None if the file does not exist
1613
path = self._tree_id_paths.get(trans_id)
1617
return file_kind(self._tree.abspath(path))
1618
except errors.NoSuchFile:
1621
def _set_mode(self, trans_id, mode_id, typefunc):
1622
"""Set the mode of new file contents.
1623
The mode_id is the existing file to get the mode from (often the same
1624
as trans_id). The operation is only performed if there's a mode match
1625
according to typefunc.
1630
old_path = self._tree_id_paths[mode_id]
1634
mode = os.stat(self._tree.abspath(old_path)).st_mode
1635
except OSError as e:
1636
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1637
# Either old_path doesn't exist, or the parent of the
1638
# target is not a directory (but will be one eventually)
1639
# Either way, we know it doesn't exist *right now*
1640
# See also bug #248448
1645
osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1647
def iter_tree_children(self, parent_id):
1648
"""Iterate through the entry's tree children, if any"""
1650
path = self._tree_id_paths[parent_id]
1654
children = os.listdir(self._tree.abspath(path))
1655
except OSError as e:
1656
if not (osutils._is_error_enotdir(e)
1657
or e.errno in (errno.ENOENT, errno.ESRCH)):
1661
for child in children:
1662
childpath = joinpath(path, child)
1663
if self._tree.is_control_filename(childpath):
1665
yield self.trans_id_tree_path(childpath)
1667
def _generate_limbo_path(self, trans_id):
1668
"""Generate a limbo path using the final path if possible.
1670
This optimizes the performance of applying the tree transform by
1671
avoiding renames. These renames can be avoided only when the parent
1672
directory is already scheduled for creation.
1674
If the final path cannot be used, falls back to using the trans_id as
1677
parent = self._new_parent.get(trans_id)
1678
# if the parent directory is already in limbo (e.g. when building a
1679
# tree), choose a limbo name inside the parent, to reduce further
1681
use_direct_path = False
1682
if self._new_contents.get(parent) == 'directory':
1683
filename = self._new_name.get(trans_id)
1684
if filename is not None:
1685
if parent not in self._limbo_children:
1686
self._limbo_children[parent] = set()
1687
self._limbo_children_names[parent] = {}
1688
use_direct_path = True
1689
# the direct path can only be used if no other file has
1690
# already taken this pathname, i.e. if the name is unused, or
1691
# if it is already associated with this trans_id.
1692
elif self._case_sensitive_target:
1693
if (self._limbo_children_names[parent].get(filename)
1694
in (trans_id, None)):
1695
use_direct_path = True
1697
for l_filename, l_trans_id in viewitems(
1698
self._limbo_children_names[parent]):
1699
if l_trans_id == trans_id:
1701
if l_filename.lower() == filename.lower():
1704
use_direct_path = True
1706
if not use_direct_path:
1707
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1709
limbo_name = pathjoin(self._limbo_files[parent], filename)
1710
self._limbo_children[parent].add(trans_id)
1711
self._limbo_children_names[parent][filename] = trans_id
1714
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1715
"""Apply all changes to the inventory and filesystem.
1717
If filesystem or inventory conflicts are present, MalformedTransform
1720
If apply succeeds, finalize is not necessary.
1722
:param no_conflicts: if True, the caller guarantees there are no
1723
conflicts, so no check is made.
1724
:param precomputed_delta: An inventory delta to use instead of
1726
:param _mover: Supply an alternate FileMover, for testing
1728
for hook in MutableTree.hooks['pre_transform']:
1729
hook(self._tree, self)
1730
if not no_conflicts:
1731
self._check_malformed()
1732
with ui.ui_factory.nested_progress_bar() as child_pb:
1733
if precomputed_delta is None:
1734
child_pb.update(gettext('Apply phase'), 0, 2)
1735
inventory_delta = self._generate_inventory_delta()
1738
inventory_delta = precomputed_delta
1741
mover = _FileMover()
1745
child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1746
self._apply_removals(mover)
1747
child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1748
modified_paths = self._apply_insertions(mover)
1753
mover.apply_deletions()
1754
if self.final_file_id(self.root) is None:
1755
inventory_delta = [e for e in inventory_delta if e[0] != '']
1756
self._tree.apply_inventory_delta(inventory_delta)
1757
self._apply_observed_sha1s()
1760
return _TransformResults(modified_paths, self.rename_count)
1762
def _generate_inventory_delta(self):
1763
"""Generate an inventory delta for the current transform."""
1764
inventory_delta = []
1765
new_paths = self._inventory_altered()
1766
total_entries = len(new_paths) + len(self._removed_id)
1767
with ui.ui_factory.nested_progress_bar() as child_pb:
1768
for num, trans_id in enumerate(self._removed_id):
1770
child_pb.update(gettext('removing file'), num, total_entries)
1771
if trans_id == self._new_root:
1772
file_id = self._tree.get_root_id()
1774
file_id = self.tree_file_id(trans_id)
1775
# File-id isn't really being deleted, just moved
1776
if file_id in self._r_new_id:
1778
path = self._tree_id_paths[trans_id]
1779
inventory_delta.append((path, None, file_id, None))
1780
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1783
for num, (path, trans_id) in enumerate(new_paths):
1785
child_pb.update(gettext('adding file'),
1786
num + len(self._removed_id), total_entries)
1787
file_id = new_path_file_ids[trans_id]
1791
kind = self.final_kind(trans_id)
1793
kind = self._tree.stored_kind(
1794
self._tree.id2path(file_id), file_id)
1795
parent_trans_id = self.final_parent(trans_id)
1796
parent_file_id = new_path_file_ids.get(parent_trans_id)
1797
if parent_file_id is None:
1798
parent_file_id = self.final_file_id(parent_trans_id)
1799
if trans_id in self._new_reference_revision:
1800
new_entry = inventory.TreeReference(
1802
self._new_name[trans_id],
1803
self.final_file_id(self._new_parent[trans_id]),
1804
None, self._new_reference_revision[trans_id])
1806
new_entry = inventory.make_entry(kind,
1807
self.final_name(trans_id),
1808
parent_file_id, file_id)
1810
old_path = self._tree.id2path(new_entry.file_id)
1811
except errors.NoSuchId:
1813
new_executability = self._new_executability.get(trans_id)
1814
if new_executability is not None:
1815
new_entry.executable = new_executability
1816
inventory_delta.append(
1817
(old_path, path, new_entry.file_id, new_entry))
1818
return inventory_delta
1820
def _apply_removals(self, mover):
1821
"""Perform tree operations that remove directory/inventory names.
1823
That is, delete files that are to be deleted, and put any files that
1824
need renaming into limbo. This must be done in strict child-to-parent
1827
If inventory_delta is None, no inventory delta generation is performed.
1829
tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
1830
with ui.ui_factory.nested_progress_bar() as child_pb:
1831
for num, (path, trans_id) in enumerate(tree_paths):
1832
# do not attempt to move root into a subdirectory of itself.
1835
child_pb.update(gettext('removing file'), num, len(tree_paths))
1836
full_path = self._tree.abspath(path)
1837
if trans_id in self._removed_contents:
1838
delete_path = os.path.join(self._deletiondir, trans_id)
1839
mover.pre_delete(full_path, delete_path)
1840
elif (trans_id in self._new_name
1841
or trans_id in self._new_parent):
1843
mover.rename(full_path, self._limbo_name(trans_id))
1844
except errors.TransformRenameFailed as e:
1845
if e.errno != errno.ENOENT:
1848
self.rename_count += 1
1850
def _apply_insertions(self, mover):
1851
"""Perform tree operations that insert directory/inventory names.
1853
That is, create any files that need to be created, and restore from
1854
limbo any files that needed renaming. This must be done in strict
1855
parent-to-child order.
1857
If inventory_delta is None, no inventory delta is calculated, and
1858
no list of modified paths is returned.
1860
new_paths = self.new_paths(filesystem_only=True)
1862
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1864
with ui.ui_factory.nested_progress_bar() as child_pb:
1865
for num, (path, trans_id) in enumerate(new_paths):
1867
child_pb.update(gettext('adding file'), num, len(new_paths))
1868
full_path = self._tree.abspath(path)
1869
if trans_id in self._needs_rename:
1871
mover.rename(self._limbo_name(trans_id), full_path)
1872
except errors.TransformRenameFailed as e:
1873
# We may be renaming a dangling inventory id
1874
if e.errno != errno.ENOENT:
1877
self.rename_count += 1
1878
# TODO: if trans_id in self._observed_sha1s, we should
1879
# re-stat the final target, since ctime will be
1880
# updated by the change.
1881
if (trans_id in self._new_contents or
1882
self.path_changed(trans_id)):
1883
if trans_id in self._new_contents:
1884
modified_paths.append(full_path)
1885
if trans_id in self._new_executability:
1886
self._set_executability(path, trans_id)
1887
if trans_id in self._observed_sha1s:
1888
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1889
st = osutils.lstat(full_path)
1890
self._observed_sha1s[trans_id] = (o_sha1, st)
1891
for path, trans_id in new_paths:
1892
# new_paths includes stuff like workingtree conflicts. Only the
1893
# stuff in new_contents actually comes from limbo.
1894
if trans_id in self._limbo_files:
1895
del self._limbo_files[trans_id]
1896
self._new_contents.clear()
1897
return modified_paths
1899
def _apply_observed_sha1s(self):
1900
"""After we have finished renaming everything, update observed sha1s
1902
This has to be done after self._tree.apply_inventory_delta, otherwise
1903
it doesn't know anything about the files we are updating. Also, we want
1904
to do this as late as possible, so that most entries end up cached.
1906
# TODO: this doesn't update the stat information for directories. So
1907
# the first 'bzr status' will still need to rewrite
1908
# .bzr/checkout/dirstate. However, we at least don't need to
1909
# re-read all of the files.
1910
# TODO: If the operation took a while, we could do a time.sleep(3) here
1911
# to allow the clock to tick over and ensure we won't have any
1912
# problems. (we could observe start time, and finish time, and if
1913
# it is less than eg 10% overhead, add a sleep call.)
1914
paths = FinalPaths(self)
1915
for trans_id, observed in viewitems(self._observed_sha1s):
1916
path = paths.get_path(trans_id)
1917
# We could get the file_id, but dirstate prefers to use the path
1918
# anyway, and it is 'cheaper' to determine.
1919
# file_id = self._new_id[trans_id]
1920
self._tree._observed_sha1(None, path, observed)
1923
class TransformPreview(DiskTreeTransform):
1924
"""A TreeTransform for generating preview trees.
1926
Unlike TreeTransform, this version works when the input tree is a
1927
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1928
unversioned files in the input tree.
1931
def __init__(self, tree, pb=None, case_sensitive=True):
1933
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1934
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1936
def canonical_path(self, path):
1939
def tree_kind(self, trans_id):
1940
path = self._tree_id_paths.get(trans_id)
1943
kind = self._tree.path_content_summary(path)[0]
1944
if kind == 'missing':
1948
def _set_mode(self, trans_id, mode_id, typefunc):
1949
"""Set the mode of new file contents.
1950
The mode_id is the existing file to get the mode from (often the same
1951
as trans_id). The operation is only performed if there's a mode match
1952
according to typefunc.
1954
# is it ok to ignore this? probably
1957
def iter_tree_children(self, parent_id):
1958
"""Iterate through the entry's tree children, if any"""
1960
path = self._tree_id_paths[parent_id]
1963
entry = next(self._tree.iter_entries_by_dir(
1964
specific_files=[path]))[1]
1965
children = getattr(entry, 'children', {})
1966
for child in children:
1967
childpath = joinpath(path, child)
1968
yield self.trans_id_tree_path(childpath)
1970
def new_orphan(self, trans_id, parent_id):
1971
raise NotImplementedError(self.new_orphan)
1974
class _PreviewTree(inventorytree.InventoryTree):
1975
"""Partial implementation of Tree to support show_diff_trees"""
1977
def __init__(self, transform):
1978
self._transform = transform
1979
self._final_paths = FinalPaths(transform)
1980
self.__by_parent = None
1981
self._parent_ids = []
1982
self._all_children_cache = {}
1983
self._path2trans_id_cache = {}
1984
self._final_name_cache = {}
1985
self._iter_changes_cache = dict((c[0], c) for c in
1986
self._transform.iter_changes())
1988
def _content_change(self, file_id):
1989
"""Return True if the content of this file changed"""
1990
changes = self._iter_changes_cache.get(file_id)
1991
# changes[2] is true if the file content changed. See
1992
# InterTree.iter_changes.
1993
return (changes is not None and changes[2])
1995
def _get_repository(self):
1996
repo = getattr(self._transform._tree, '_repository', None)
1998
repo = self._transform._tree.branch.repository
2001
def _iter_parent_trees(self):
2002
for revision_id in self.get_parent_ids():
2004
yield self.revision_tree(revision_id)
2005
except errors.NoSuchRevisionInTree:
2006
yield self._get_repository().revision_tree(revision_id)
2008
def _get_file_revision(self, path, file_id, vf, tree_revision):
2010
(file_id, t.get_file_revision(t.id2path(file_id), file_id))
2011
for t in self._iter_parent_trees()]
2012
vf.add_lines((file_id, tree_revision), parent_keys,
2013
self.get_file_lines(path, file_id))
2014
repo = self._get_repository()
2015
base_vf = repo.texts
2016
if base_vf not in vf.fallback_versionedfiles:
2017
vf.fallback_versionedfiles.append(base_vf)
2018
return tree_revision
2020
def _stat_limbo_file(self, trans_id):
2021
name = self._transform._limbo_name(trans_id)
2022
return os.lstat(name)
2025
def _by_parent(self):
2026
if self.__by_parent is None:
2027
self.__by_parent = self._transform.by_parent()
2028
return self.__by_parent
2030
def _comparison_data(self, entry, path):
2031
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2032
if kind == 'missing':
2036
file_id = self._transform.final_file_id(self._path2trans_id(path))
2037
executable = self.is_executable(path, file_id)
2038
return kind, executable, None
2040
def is_locked(self):
2043
def lock_read(self):
2044
# Perhaps in theory, this should lock the TreeTransform?
2045
return lock.LogicalLockResult(self.unlock)
2051
def root_inventory(self):
2052
"""This Tree does not use inventory as its backing data."""
2053
raise NotImplementedError(_PreviewTree.root_inventory)
2055
def get_root_id(self):
2056
return self._transform.final_file_id(self._transform.root)
2058
def all_file_ids(self):
2059
tree_ids = set(self._transform._tree.all_file_ids())
2060
tree_ids.difference_update(self._transform.tree_file_id(t)
2061
for t in self._transform._removed_id)
2062
tree_ids.update(viewvalues(self._transform._new_id))
2065
def all_versioned_paths(self):
2066
return {self.id2path(fid) for fid in self.all_file_ids()}
2068
def _has_id(self, file_id, fallback_check):
2069
if file_id in self._transform._r_new_id:
2071
elif file_id in {self._transform.tree_file_id(trans_id) for
2072
trans_id in self._transform._removed_id}:
2075
return fallback_check(file_id)
2077
def has_id(self, file_id):
2078
return self._has_id(file_id, self._transform._tree.has_id)
2080
def has_or_had_id(self, file_id):
2081
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2083
def _path2trans_id(self, path):
2084
# We must not use None here, because that is a valid value to store.
2085
trans_id = self._path2trans_id_cache.get(path, object)
2086
if trans_id is not object:
2088
segments = splitpath(path)
2089
cur_parent = self._transform.root
2090
for cur_segment in segments:
2091
for child in self._all_children(cur_parent):
2092
final_name = self._final_name_cache.get(child)
2093
if final_name is None:
2094
final_name = self._transform.final_name(child)
2095
self._final_name_cache[child] = final_name
2096
if final_name == cur_segment:
2100
self._path2trans_id_cache[path] = None
2102
self._path2trans_id_cache[path] = cur_parent
2105
def path2id(self, path):
2106
if isinstance(path, list):
2109
path = osutils.pathjoin(*path)
2110
return self._transform.final_file_id(self._path2trans_id(path))
2112
def id2path(self, file_id):
2113
trans_id = self._transform.trans_id_file_id(file_id)
2115
return self._final_paths._determine_path(trans_id)
2117
raise errors.NoSuchId(self, file_id)
2119
def _all_children(self, trans_id):
2120
children = self._all_children_cache.get(trans_id)
2121
if children is not None:
2123
children = set(self._transform.iter_tree_children(trans_id))
2124
# children in the _new_parent set are provided by _by_parent.
2125
children.difference_update(self._transform._new_parent)
2126
children.update(self._by_parent.get(trans_id, []))
2127
self._all_children_cache[trans_id] = children
2130
def _iter_children(self, file_id):
2131
trans_id = self._transform.trans_id_file_id(file_id)
2132
for child_trans_id in self._all_children(trans_id):
2133
yield self._transform.final_file_id(child_trans_id)
2136
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2137
in self._transform._tree.extras())
2138
possible_extras.update(self._transform._new_contents)
2139
possible_extras.update(self._transform._removed_id)
2140
for trans_id in possible_extras:
2141
if self._transform.final_file_id(trans_id) is None:
2142
yield self._final_paths._determine_path(trans_id)
2144
def _make_inv_entries(self, ordered_entries, specific_files=None):
2145
for trans_id, parent_file_id in ordered_entries:
2146
file_id = self._transform.final_file_id(trans_id)
2149
if (specific_files is not None and
2150
self._final_paths.get_path(trans_id) not in specific_files):
2152
kind = self._transform.final_kind(trans_id)
2154
kind = self._transform._tree.stored_kind(
2155
self._transform._tree.id2path(file_id),
2157
new_entry = inventory.make_entry(
2159
self._transform.final_name(trans_id),
2160
parent_file_id, file_id)
2161
yield new_entry, trans_id
2163
def _list_files_by_dir(self):
2164
todo = [ROOT_PARENT]
2166
while len(todo) > 0:
2168
parent_file_id = self._transform.final_file_id(parent)
2169
children = list(self._all_children(parent))
2170
paths = dict(zip(children, self._final_paths.get_paths(children)))
2171
children.sort(key=paths.get)
2172
todo.extend(reversed(children))
2173
for trans_id in children:
2174
ordered_ids.append((trans_id, parent_file_id))
2177
def iter_child_entries(self, path, file_id=None):
2178
trans_id = self._path2trans_id(path)
2179
if trans_id is None:
2180
raise errors.NoSuchFile(path)
2181
todo = [(child_trans_id, trans_id) for child_trans_id in
2182
self._all_children(trans_id)]
2183
for entry, trans_id in self._make_inv_entries(todo):
2186
def iter_entries_by_dir(self, specific_files=None):
2187
# This may not be a maximally efficient implementation, but it is
2188
# reasonably straightforward. An implementation that grafts the
2189
# TreeTransform changes onto the tree's iter_entries_by_dir results
2190
# might be more efficient, but requires tricky inferences about stack
2192
ordered_ids = self._list_files_by_dir()
2193
for entry, trans_id in self._make_inv_entries(ordered_ids,
2195
yield self._final_paths.get_path(trans_id), entry
2197
def _iter_entries_for_dir(self, dir_path):
2198
"""Return path, entry for items in a directory without recursing down."""
2200
dir_trans_id = self._path2trans_id(dir_path)
2201
dir_id = self._transform.final_file_id(dir_trans_id)
2202
for child_trans_id in self._all_children(dir_trans_id):
2203
ordered_ids.append((child_trans_id, dir_id))
2205
for entry, trans_id in self._make_inv_entries(ordered_ids):
2206
path_entries.append((self._final_paths.get_path(trans_id), entry))
2210
def list_files(self, include_root=False, from_dir=None, recursive=True):
2211
"""See WorkingTree.list_files."""
2212
# XXX This should behave like WorkingTree.list_files, but is really
2213
# more like RevisionTree.list_files.
2217
prefix = from_dir + '/'
2218
entries = self.iter_entries_by_dir()
2219
for path, entry in entries:
2220
if entry.name == '' and not include_root:
2223
if not path.startswith(prefix):
2225
path = path[len(prefix):]
2226
yield path, 'V', entry.kind, entry.file_id, entry
2228
if from_dir is None and include_root is True:
2229
root_entry = inventory.make_entry('directory', '',
2230
ROOT_PARENT, self.get_root_id())
2231
yield '', 'V', 'directory', root_entry.file_id, root_entry
2232
entries = self._iter_entries_for_dir(from_dir or '')
2233
for path, entry in entries:
2234
yield path, 'V', entry.kind, entry.file_id, entry
2236
def kind(self, path, file_id=None):
2237
trans_id = self._path2trans_id(path)
2238
if trans_id is None:
2239
raise errors.NoSuchFile(path)
2240
return self._transform.final_kind(trans_id)
2242
def stored_kind(self, path, file_id=None):
2243
trans_id = self._path2trans_id(path)
2244
if trans_id is None:
2245
raise errors.NoSuchFile(path)
2247
return self._transform._new_contents[trans_id]
2249
return self._transform._tree.stored_kind(path, file_id)
2251
def get_file_mtime(self, path, file_id=None):
2252
"""See Tree.get_file_mtime"""
2254
file_id = self.path2id(path)
2256
raise errors.NoSuchFile(path)
2257
if not self._content_change(file_id):
2258
return self._transform._tree.get_file_mtime(
2259
self._transform._tree.id2path(file_id), file_id)
2260
trans_id = self._path2trans_id(path)
2261
return self._stat_limbo_file(trans_id).st_mtime
2263
def get_file_size(self, path, file_id=None):
2264
"""See Tree.get_file_size"""
2265
trans_id = self._path2trans_id(path)
2266
if trans_id is None:
2267
raise errors.NoSuchFile(path)
2268
kind = self._transform.final_kind(trans_id)
2271
if trans_id in self._transform._new_contents:
2272
return self._stat_limbo_file(trans_id).st_size
2273
if self.kind(path, file_id) == 'file':
2274
return self._transform._tree.get_file_size(path, file_id)
2278
def get_file_verifier(self, path, file_id=None, stat_value=None):
2279
trans_id = self._path2trans_id(path)
2280
if trans_id is None:
2281
raise errors.NoSuchFile(path)
2282
kind = self._transform._new_contents.get(trans_id)
2284
return self._transform._tree.get_file_verifier(path, file_id)
2286
with self.get_file(path, file_id) as fileobj:
2287
return ("SHA1", sha_file(fileobj))
2289
def get_file_sha1(self, path, file_id=None, stat_value=None):
2290
trans_id = self._path2trans_id(path)
2291
if trans_id is None:
2292
raise errors.NoSuchFile(path)
2293
kind = self._transform._new_contents.get(trans_id)
2295
return self._transform._tree.get_file_sha1(path, file_id)
2297
with self.get_file(path, file_id) as fileobj:
2298
return sha_file(fileobj)
2300
def is_executable(self, path, file_id=None):
2301
trans_id = self._path2trans_id(path)
2302
if trans_id is None:
2305
return self._transform._new_executability[trans_id]
2308
return self._transform._tree.is_executable(path, file_id)
2309
except OSError as e:
2310
if e.errno == errno.ENOENT:
2313
except errors.NoSuchFile:
2316
def has_filename(self, path):
2317
trans_id = self._path2trans_id(path)
2318
if trans_id in self._transform._new_contents:
2320
elif trans_id in self._transform._removed_contents:
2323
return self._transform._tree.has_filename(path)
2325
def path_content_summary(self, path):
2326
trans_id = self._path2trans_id(path)
2327
tt = self._transform
2328
tree_path = tt._tree_id_paths.get(trans_id)
2329
kind = tt._new_contents.get(trans_id)
2331
if tree_path is None or trans_id in tt._removed_contents:
2332
return 'missing', None, None, None
2333
summary = tt._tree.path_content_summary(tree_path)
2334
kind, size, executable, link_or_sha1 = summary
2337
limbo_name = tt._limbo_name(trans_id)
2338
if trans_id in tt._new_reference_revision:
2339
kind = 'tree-reference'
2341
statval = os.lstat(limbo_name)
2342
size = statval.st_size
2343
if not tt._limbo_supports_executable():
2346
executable = statval.st_mode & S_IEXEC
2350
if kind == 'symlink':
2351
link_or_sha1 = os.readlink(limbo_name)
2352
if not isinstance(link_or_sha1, text_type):
2353
link_or_sha1 = link_or_sha1.decode(osutils._fs_enc)
2354
executable = tt._new_executability.get(trans_id, executable)
2355
return kind, size, executable, link_or_sha1
2357
def iter_changes(self, from_tree, include_unchanged=False,
2358
specific_files=None, pb=None, extra_trees=None,
2359
require_versioned=True, want_unversioned=False):
2360
"""See InterTree.iter_changes.
2362
This has a fast path that is only used when the from_tree matches
2363
the transform tree, and no fancy options are supplied.
2365
if (from_tree is not self._transform._tree or include_unchanged or
2366
specific_files or want_unversioned):
2367
return tree.InterTree(from_tree, self).iter_changes(
2368
include_unchanged=include_unchanged,
2369
specific_files=specific_files,
2371
extra_trees=extra_trees,
2372
require_versioned=require_versioned,
2373
want_unversioned=want_unversioned)
2374
if want_unversioned:
2375
raise ValueError('want_unversioned is not supported')
2376
return self._transform.iter_changes()
2378
def get_file(self, path, file_id=None):
2379
"""See Tree.get_file"""
2381
file_id = self.path2id(path)
2382
if not self._content_change(file_id):
2383
return self._transform._tree.get_file(path, file_id)
2384
trans_id = self._path2trans_id(path)
2385
name = self._transform._limbo_name(trans_id)
2386
return open(name, 'rb')
2388
def get_file_with_stat(self, path, file_id=None):
2389
return self.get_file(path, file_id), None
2391
def annotate_iter(self, path, file_id=None,
2392
default_revision=_mod_revision.CURRENT_REVISION):
2394
file_id = self.path2id(path)
2395
changes = self._iter_changes_cache.get(file_id)
2399
changed_content, versioned, kind = (changes[2], changes[3],
2403
get_old = (kind[0] == 'file' and versioned[0])
2405
old_annotation = self._transform._tree.annotate_iter(
2406
path, file_id=file_id, default_revision=default_revision)
2410
return old_annotation
2411
if not changed_content:
2412
return old_annotation
2413
# TODO: This is doing something similar to what WT.annotate_iter is
2414
# doing, however it fails slightly because it doesn't know what
2415
# the *other* revision_id is, so it doesn't know how to give the
2416
# other as the origin for some lines, they all get
2417
# 'default_revision'
2418
# It would be nice to be able to use the new Annotator based
2419
# approach, as well.
2420
return annotate.reannotate([old_annotation],
2421
self.get_file(path, file_id).readlines(),
2424
def get_symlink_target(self, path, file_id=None):
2425
"""See Tree.get_symlink_target"""
2427
file_id = self.path2id(path)
2428
if not self._content_change(file_id):
2429
return self._transform._tree.get_symlink_target(path)
2430
trans_id = self._path2trans_id(path)
2431
name = self._transform._limbo_name(trans_id)
2432
return osutils.readlink(name)
2434
def walkdirs(self, prefix=''):
2435
pending = [self._transform.root]
2436
while len(pending) > 0:
2437
parent_id = pending.pop()
2440
prefix = prefix.rstrip('/')
2441
parent_path = self._final_paths.get_path(parent_id)
2442
parent_file_id = self._transform.final_file_id(parent_id)
2443
for child_id in self._all_children(parent_id):
2444
path_from_root = self._final_paths.get_path(child_id)
2445
basename = self._transform.final_name(child_id)
2446
file_id = self._transform.final_file_id(child_id)
2447
kind = self._transform.final_kind(child_id)
2448
if kind is not None:
2449
versioned_kind = kind
2452
versioned_kind = self._transform._tree.stored_kind(
2453
self._transform._tree.id2path(file_id),
2455
if versioned_kind == 'directory':
2456
subdirs.append(child_id)
2457
children.append((path_from_root, basename, kind, None,
2458
file_id, versioned_kind))
2460
if parent_path.startswith(prefix):
2461
yield (parent_path, parent_file_id), children
2462
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2465
def get_parent_ids(self):
2466
return self._parent_ids
2468
def set_parent_ids(self, parent_ids):
2469
self._parent_ids = parent_ids
2471
def get_revision_tree(self, revision_id):
2472
return self._transform._tree.get_revision_tree(revision_id)
864
2475
def joinpath(parent, child):
865
2476
"""Join tree-relative paths, handling the tree root specially"""
866
2477
if parent is None or parent == "":
896
2507
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2508
return self._known_paths[trans_id]
2510
def get_paths(self, trans_ids):
2511
return [(self.get_path(t), t) for t in trans_ids]
899
2515
def topology_sorted_ids(tree):
900
2516
"""Determine the topological order of the ids in a tree"""
901
2517
file_ids = list(tree)
902
2518
file_ids.sort(key=tree.id2path)


def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
               delta_from_tree=False):
    """Create working tree for a branch, using a TreeTransform.

    This function should be used on empty trees, having a tree root at most.
    (see merge and revert functionality for working with existing trees)

    Existing files are handled like so:

    - Existing bzrdirs take precedence over creating new items.  They are
      created as '%s.diverted' % name.
    - Otherwise, if the content on disk matches the content we are building,
      it is silently replaced.
    - Otherwise, conflict resolution will move the old file to 'oldname.moved'.

    :param tree: The tree to convert wt into a copy of
    :param wt: The working tree that files will be placed into
    :param accelerator_tree: A tree which can be used for retrieving file
        contents more quickly than tree itself, i.e. a workingtree.  tree
        will be used for cases where accelerator_tree's content is different.
    :param hardlink: If true, hard-link files to accelerator_tree, where
        possible.  accelerator_tree must implement abspath, i.e. be a
        working tree.
    :param delta_from_tree: If true, build_tree may use the input Tree to
        generate the inventory delta.
    """
    with wt.lock_tree_write(), tree.lock_read():
        if accelerator_tree is not None:
            accelerator_tree.lock_read()
        try:
            return _build_tree(tree, wt, accelerator_tree, hardlink,
                               delta_from_tree)
        finally:
            if accelerator_tree is not None:
                accelerator_tree.unlock()
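
# Editorial sketch (not part of the original module): a typical call builds a
# fresh checkout from a revision tree.  `branch` and `empty_wt` are
# hypothetical names; `empty_wt` must be an empty working tree, as the
# docstring above requires.
#
#   revision_tree = branch.basis_tree()
#   build_tree(revision_tree, empty_wt,
#              accelerator_tree=None, hardlink=False)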


def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
    """See build_tree."""
    for num, _unused in enumerate(wt.all_versioned_paths()):
        if num > 0:  # more than just a root
            raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
    file_trans_id = {}
    top_pb = ui.ui_factory.nested_progress_bar()
    pp = ProgressPhase("Build phase", 2, top_pb)
    if tree.get_root_id() is not None:
        # This is kind of a hack: we should be altering the root
        # as part of the regular tree shape diff logic.
        # The conditional test here is to avoid doing an
        # expensive operation (flush) every time the root id
        # is set within the tree, nor setting the root and thus
        # marking the tree as dirty, because we use two different
        # idioms here: tree interfaces and inventory interfaces.
        if wt.get_root_id() != tree.get_root_id():
            wt.set_root_id(tree.get_root_id())
            wt.flush()
    tt = TreeTransform(wt)
    divert = set()
    try:
        pp.next_phase()
        file_trans_id[wt.get_root_id()] = tt.trans_id_tree_path('')
        with ui.ui_factory.nested_progress_bar() as pb:
            deferred_contents = []
            num = 0
            total = len(tree.all_versioned_paths())
            if delta_from_tree:
                precomputed_delta = []
            else:
                precomputed_delta = None
            # Check if tree inventory has content. If so, we populate
            # existing_files with the directory content. If there are no
            # entries we skip populating existing_files as it is not used.
            # This improves performance and avoids unnecessary work on large
            # directory trees. (#501307)
            if total > 0:
                existing_files = set()
                for dir, files in wt.walkdirs():
                    existing_files.update(f[0] for f in files)
            for num, (tree_path, entry) in \
                    enumerate(tree.iter_entries_by_dir()):
                pb.update(gettext("Building tree"),
                          num - len(deferred_contents), total)
                if entry.parent_id is None:
                    continue
                reparent = False
                file_id = entry.file_id
                if delta_from_tree:
                    precomputed_delta.append((None, tree_path, file_id, entry))
                if tree_path in existing_files:
                    target_path = wt.abspath(tree_path)
                    kind = file_kind(target_path)
                    if kind == "directory":
                        try:
                            controldir.ControlDir.open(target_path)
                        except errors.NotBranchError:
                            pass
                        else:
                            divert.add(file_id)
                    if (file_id not in divert and
                            _content_match(tree, entry, tree_path, file_id,
                                           kind, target_path)):
                        tt.delete_contents(tt.trans_id_tree_path(tree_path))
                        if kind == 'directory':
                            reparent = True
                parent_id = file_trans_id[entry.parent_id]
                if entry.kind == 'file':
                    # We *almost* replicate new_by_entry, so that we can defer
                    # getting the file text, and get them all at once.
                    trans_id = tt.create_path(entry.name, parent_id)
                    file_trans_id[file_id] = trans_id
                    tt.version_file(file_id, trans_id)
                    executable = tree.is_executable(tree_path, file_id)
                    if executable:
                        tt.set_executability(executable, trans_id)
                    trans_data = (trans_id, file_id, tree_path, entry.text_sha1)
                    deferred_contents.append((tree_path, trans_data))
                else:
                    file_trans_id[file_id] = new_by_entry(
                        tree_path, tt, entry, parent_id, tree)
                if reparent:
                    new_trans_id = file_trans_id[file_id]
                    old_parent = tt.trans_id_tree_path(tree_path)
                    _reparent_children(tt, old_parent, new_trans_id)
            offset = num + 1 - len(deferred_contents)
            _create_files(tt, tree, deferred_contents, pb, offset,
                          accelerator_tree, hardlink)
        pp.next_phase()
        divert_trans = set(file_trans_id[f] for f in divert)
        resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
        raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
        if len(raw_conflicts) > 0:
            precomputed_delta = None
        conflicts = cook_conflicts(raw_conflicts, tt)
        for conflict in conflicts:
            trace.warning(text_type(conflict))
        try:
            wt.add_conflicts(conflicts)
        except errors.UnsupportedOperation:
            pass
        result = tt.apply(no_conflicts=True,
                          precomputed_delta=precomputed_delta)
    finally:
        tt.finalize()
        top_pb.finished()
    return result


def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
                  hardlink):
    total = len(desired_files) + offset
    wt = tt._tree
    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(p[0], p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(tp, ap) for (tp, ap) in unchanged
                         if not next(accelerator_tree.iter_search_rules([ap]))]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for unused_tree_path, (trans_id, file_id, tree_path, text_sha1) in desired_files:
            accelerator_path = unchanged.get(tree_path)
            if accelerator_path is None:
                new_desired_files.append((tree_path,
                    (trans_id, file_id, tree_path, text_sha1)))
                continue
            pb.update(gettext('Adding file contents'), count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)
            else:
                with accelerator_tree.get_file(accelerator_path, file_id) as f:
                    chunks = osutils.file_iterator(f)
                    if wt.supports_content_filtering():
                        filters = wt._content_filter_stack(tree_path)
                        chunks = filtered_output_bytes(chunks, filters,
                            ContentFilterContext(tree_path, tree))
                    tt.create_file(chunks, trans_id, sha1=text_sha1)
            count += 1
        offset += count
    for count, ((trans_id, file_id, tree_path, text_sha1), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id, sha1=text_sha1)
        pb.update(gettext('Adding file contents'), count + offset, total)
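
# Editorial note (not part of the original module): each element of
# `desired_files` has the shape produced in _build_tree above, i.e.
# (tree_path, (trans_id, file_id, tree_path, text_sha1)), which lets the
# final loop fetch all file texts in one tree.iter_files_bytes() batch.
# A hypothetical single entry might look like:
#
#   desired_files = [
#       ('README', (trans_id, b'readme-id', 'README', None)),
#   ]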


def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]


def _content_match(tree, entry, tree_path, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        with open(target_path, 'rb') as f1, \
                tree.get_file(tree_path, file_id) as f2:
            if osutils.compare_files(f1, f2):
                return True
    elif entry.kind == "symlink":
        if tree.get_symlink_target(tree_path, file_id) == os.readlink(target_path):
            return True
    return False


def resolve_checkout(tt, conflicts, divert):
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]

        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file) + '.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file) + '.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts


def new_by_entry(path, tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry"""
    name = entry.name
    kind = entry.kind
    if kind == 'file':
        with tree.get_file(path, entry.file_id) as f:
            executable = tree.is_executable(path, entry.file_id)
            return tt.new_file(
                name, parent_id, osutils.file_iterator(f), entry.file_id,
                executable)
    elif kind in ('directory', 'tree-reference'):
        trans_id = tt.new_directory(name, parent_id, entry.file_id)
        if kind == 'tree-reference':
            tt.set_tree_reference(entry.reference_revision, trans_id)
        return trans_id
    elif kind == 'symlink':
        target = tree.get_symlink_target(path, entry.file_id)
        return tt.new_symlink(name, parent_id, target, entry.file_id)
    else:
        raise errors.BadFileKindError(name, kind)


def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
    """Create new file contents according to an inventory entry."""
    if entry.kind == "file":
        if lines is None:
            lines = tree.get_file(entry.file_id).readlines()
        tt.create_file(lines, trans_id, mode_id=mode_id)
    elif entry.kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
    elif entry.kind == "directory":
        tt.create_directory(trans_id)
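
# Editorial sketch (not part of the original module): new_by_entry is driven
# from _build_tree above for non-file kinds.  With a hypothetical transform
# `tt`, source `tree`, inventory `entry` and parent trans id `parent_id`:
#
#   trans_id = new_by_entry(tree_path, tt, entry, parent_id, tree)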


def create_from_tree(tt, trans_id, tree, path, file_id=None, chunks=None,
                     filter_tree_path=None):
    """Create new file contents according to tree contents.

    :param filter_tree_path: the tree path to use to lookup
      content filters to apply to the bytes output in the working tree.
      This only applies if the working tree supports content filtering.
    """
    kind = tree.kind(path, file_id)
    if kind == 'directory':
        tt.create_directory(trans_id)
    elif kind == "file":
        if chunks is None:
            f = tree.get_file(path, file_id)
            chunks = osutils.file_iterator(f)
        else:
            f = None
        try:
            wt = tt._tree
            if wt.supports_content_filtering() and filter_tree_path is not None:
                filters = wt._content_filter_stack(filter_tree_path)
                chunks = filtered_output_bytes(chunks, filters,
                    ContentFilterContext(filter_tree_path, tree))
            tt.create_file(chunks, trans_id)
        finally:
            if f is not None:
                f.close()
    elif kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(path, file_id), trans_id)
    else:
        raise AssertionError('Unknown kind %r' % kind)
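
# Editorial sketch (not part of the original module): create_from_tree only
# creates contents; versioning and naming are handled separately by the
# transform.  With hypothetical `tt`, `tree`, `trans_id` and a path that is
# versioned in `tree`:
#
#   create_from_tree(tt, trans_id, tree, 'docs/index.txt',
#                    filter_tree_path='docs/index.txt')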


def create_entry_executability(tt, entry, trans_id):
    """Set the executability of a trans_id according to an inventory entry"""
    if entry.kind == "file":
        tt.set_executability(entry.executable, trans_id)


def find_interesting(working_tree, target_tree, filenames):
    """Find the ids corresponding to specified filenames."""
    if not filenames:
        interesting_ids = None
    else:
        interesting_ids = set()
        for tree_path in filenames:
            not_versioned = True
            for tree in (working_tree, target_tree):
                file_id = tree.inventory.path2id(tree_path)
                if file_id is not None:
                    interesting_ids.add(file_id)
                    not_versioned = False
            if not_versioned:
                raise NotVersionedError(path=tree_path)
    return interesting_ids


def change_entry(tt, file_id, working_tree, target_tree,
                 trans_id_file_id, backups, trans_id, by_parent):
    """Replace a file_id's contents with those from a target tree."""
    e_trans_id = trans_id_file_id(file_id)
    entry = target_tree.inventory[file_id]
    has_contents, contents_mod, meta_mod, = _entry_changes(file_id, entry,
                                                           working_tree)
    if contents_mod:
        mode_id = e_trans_id
        if has_contents:
            if not backups:
                tt.delete_contents(e_trans_id)
            else:
                parent_trans_id = trans_id_file_id(entry.parent_id)
                backup_name = get_backup_name(entry, by_parent,
                                              parent_trans_id, tt)
                tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
                tt.unversion_file(e_trans_id)
                e_trans_id = tt.create_path(entry.name, parent_trans_id)
                tt.version_file(file_id, e_trans_id)
                trans_id[file_id] = e_trans_id
        create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
        create_entry_executability(tt, entry, e_trans_id)
    elif meta_mod:
        tt.set_executability(entry.executable, e_trans_id)
    if tt.final_name(e_trans_id) != entry.name:
        adjust_path = True
    else:
        parent_id = tt.final_parent(e_trans_id)
        parent_file_id = tt.final_file_id(parent_id)
        if parent_file_id != entry.parent_id:
            adjust_path = True
        else:
            adjust_path = False
    if adjust_path:
        parent_trans_id = trans_id_file_id(entry.parent_id)
        tt.adjust_path(entry.name, parent_trans_id, e_trans_id)


def get_backup_name(entry, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        counter = 1
        while True:
            yield "%s.~%d~" % (entry.name, counter)
            counter += 1
    for name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, name):
            return name
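
# Editorial note (not part of the original module): the generator above
# proposes names of the form "<name>.~<counter>~", so a conflicting entry
# named "setup.py" would be backed up as "setup.py.~1~", then "setup.py.~2~"
# if that name is already taken, and so on.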


def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        real_e_kind = entry.kind
        if real_e_kind == 'root_directory':
            real_e_kind = 'directory'
        if real_e_kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2833
def revert(working_tree, target_tree, filenames, backups=False,
2834
pb=None, change_reporter=None):
1070
2835
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
1075
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2836
pb = ui.ui_factory.nested_progress_bar()
2838
with target_tree.lock_read(), TreeTransform(working_tree, pb) as tt:
2839
pp = ProgressPhase("Revert phase", 3, pb)
2840
conflicts, merge_modified = _prepare_revert_transform(
2841
working_tree, target_tree, tt, filenames, backups, pp)
2843
change_reporter = delta._ChangeReporter(
2844
unversioned_filter=working_tree.is_ignored)
2845
delta.report_changes(tt.iter_changes(), change_reporter)
2846
for conflict in conflicts:
2847
trace.warning(text_type(conflict))
2850
if working_tree.supports_merge_modified():
2851
working_tree.set_merge_modified(merge_modified)
2857
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2858
backups, pp, basis_tree=None,
2859
merge_modified=None):
2860
with ui.ui_factory.nested_progress_bar() as child_pb:
2861
if merge_modified is None:
2862
merge_modified = working_tree.merge_modified()
2863
merge_modified = _alter_files(working_tree, target_tree, tt,
2864
child_pb, filenames, backups,
2865
merge_modified, basis_tree)
2866
with ui.ui_factory.nested_progress_bar() as child_pb:
2867
raw_conflicts = resolve_conflicts(tt, child_pb,
2868
lambda t, c: conflict_pass(t, c, target_tree))
2869
conflicts = cook_conflicts(raw_conflicts, tt)
2870
return conflicts, merge_modified
2873
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2874
backups, merge_modified, basis_tree=None):
2875
if basis_tree is not None:
2876
basis_tree.lock_read()
2877
# We ask the working_tree for its changes relative to the target, rather
2878
# than the target changes relative to the working tree. Because WT4 has an
2879
# optimizer to compare itself to a target, but no optimizer for the
2881
change_list = working_tree.iter_changes(target_tree,
2882
specific_files=specific_files, pb=pb)
2883
if not target_tree.is_versioned(u''):
2889
for id_num, (file_id, path, changed_content, versioned, parent, name,
2890
kind, executable) in enumerate(change_list):
2891
target_path, wt_path = path
2892
target_versioned, wt_versioned = versioned
2893
target_parent, wt_parent = parent
2894
target_name, wt_name = name
2895
target_kind, wt_kind = kind
2896
target_executable, wt_executable = executable
2897
if skip_root and wt_parent is None:
2899
trans_id = tt.trans_id_file_id(file_id)
2902
keep_content = False
2903
if wt_kind == 'file' and (backups or target_kind is None):
2904
wt_sha1 = working_tree.get_file_sha1(wt_path, file_id)
2905
if merge_modified.get(file_id) != wt_sha1:
2906
# acquire the basis tree lazily to prevent the
2907
# expense of accessing it when it's not needed ?
2908
# (Guessing, RBC, 200702)
2909
if basis_tree is None:
2910
basis_tree = working_tree.basis_tree()
2911
basis_tree.lock_read()
2912
basis_path = find_previous_path(working_tree, basis_tree, wt_path)
2913
if basis_path is None:
2914
if target_kind is None and not target_versioned:
2917
if wt_sha1 != basis_tree.get_file_sha1(basis_path, file_id):
2919
if wt_kind is not None:
2920
if not keep_content:
1121
2921
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2922
elif target_kind is not None:
2923
parent_trans_id = tt.trans_id_file_id(wt_parent)
2924
backup_name = tt._available_backup_name(
2925
wt_name, parent_trans_id)
2926
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2927
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2928
if wt_versioned and target_versioned:
2929
tt.unversion_file(trans_id)
2930
tt.version_file(file_id, new_trans_id)
2931
# New contents should have the same unix perms as old
2934
trans_id = new_trans_id
2935
if target_kind in ('directory', 'tree-reference'):
2936
tt.create_directory(trans_id)
2937
if target_kind == 'tree-reference':
2938
revision = target_tree.get_reference_revision(
2939
target_path, file_id)
2940
tt.set_tree_reference(revision, trans_id)
2941
elif target_kind == 'symlink':
2942
tt.create_symlink(target_tree.get_symlink_target(
2943
target_path, file_id), trans_id)
2944
elif target_kind == 'file':
2945
deferred_files.append((target_path, (trans_id, mode_id, file_id)))
2946
if basis_tree is None:
2947
basis_tree = working_tree.basis_tree()
2948
basis_tree.lock_read()
2949
new_sha1 = target_tree.get_file_sha1(target_path, file_id)
2950
basis_path = find_previous_path(target_tree, basis_tree, target_path)
2951
if (basis_path is not None and
2952
new_sha1 == basis_tree.get_file_sha1(basis_path, file_id)):
2953
if file_id in merge_modified:
2954
del merge_modified[file_id]
2956
merge_modified[file_id] = new_sha1
2958
# preserve the execute bit when backing up
2959
if keep_content and wt_executable == target_executable:
2960
tt.set_executability(target_executable, trans_id)
2961
elif target_kind is not None:
2962
raise AssertionError(target_kind)
2963
if not wt_versioned and target_versioned:
2964
tt.version_file(file_id, trans_id)
2965
if wt_versioned and not target_versioned:
2966
tt.unversion_file(trans_id)
2967
if (target_name is not None and
2968
(wt_name != target_name or wt_parent != target_parent)):
2969
if target_name == '' and target_parent is None:
2970
parent_trans = ROOT_PARENT
2972
parent_trans = tt.trans_id_file_id(target_parent)
2973
if wt_parent is None and wt_versioned:
2974
tt.adjust_root_path(target_name, parent_trans)
2976
tt.adjust_path(target_name, parent_trans, trans_id)
2977
if wt_executable != target_executable and target_kind == "file":
2978
tt.set_executability(target_executable, trans_id)
2979
if working_tree.supports_content_filtering():
2980
for (trans_id, mode_id, file_id), bytes in (
2981
target_tree.iter_files_bytes(deferred_files)):
2982
# We're reverting a tree to the target tree so using the
2983
# target tree to find the file path seems the best choice
2984
# here IMO - Ian C 27/Oct/2009
2985
filter_tree_path = target_tree.id2path(file_id)
2986
filters = working_tree._content_filter_stack(filter_tree_path)
2987
bytes = filtered_output_bytes(bytes, filters,
2988
ContentFilterContext(filter_tree_path, working_tree))
2989
tt.create_file(bytes, trans_id, mode_id)
2991
for (trans_id, mode_id, file_id), bytes in target_tree.iter_files_bytes(
2993
tt.create_file(bytes, trans_id, mode_id)
2994
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
2996
if basis_tree is not None:
2998
return merge_modified
3001


def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    with ui.ui_factory.nested_progress_bar() as pb:
        for n in range(10):
            pb.update(gettext('Resolution pass'), n + 1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
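
# Editorial sketch (not part of the original module): callers can bias
# resolution by supplying their own pass_func, as build_tree does above with
# resolve_checkout.  With a hypothetical transform `tt`:
#
#   remaining = resolve_conflicts(
#       tt, pass_func=lambda t, c: resolve_checkout(t, c, divert=set()))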


def conflict_pass(tt, conflicts, path_tree=None):
    """Resolve some classes of conflicts.

    :param tt: The transform to resolve conflicts in
    :param conflicts: The conflicts to resolve
    :param path_tree: A Tree to get supplemental paths from
    """
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        if c_type == 'duplicate id':