861
831
self.create_symlink(target, trans_id)
834
def new_orphan(self, trans_id, parent_id):
    """Schedule an item to be orphaned.

    When a directory is about to be removed, its children, if they are not
    versioned are moved out of the way: they don't have a parent anymore.

    :param trans_id: The trans_id of the existing item.
    :param parent_id: The parent trans_id of the item.
    """
    # Abstract: concrete transforms decide how orphans are handled.
    raise NotImplementedError(self.new_orphan)
845
def _get_potential_orphans(self, dir_id):
846
"""Find the potential orphans in a directory.
848
A directory can't be safely deleted if there are versioned files in it.
849
If all the contained files are unversioned then they can be orphaned.
851
The 'None' return value means that the directory contains at least one
852
versioned file and should not be deleted.
854
:param dir_id: The directory trans id.
856
:return: A list of the orphan trans ids or None if at least one
857
versioned file is present.
860
# Find the potential orphans, stop if one item should be kept
861
for child_tid in self.by_parent()[dir_id]:
862
if child_tid in self._removed_contents:
863
# The child is removed as part of the transform. Since it was
864
# versioned before, it's not an orphan
866
elif self.final_file_id(child_tid) is None:
867
# The child is not versioned
868
orphans.append(child_tid)
870
# We have a versioned file here, searching for orphans is
876
def _affected_ids(self):
877
"""Return the set of transform ids affected by the transform"""
878
trans_ids = set(self._removed_id)
879
trans_ids.update(self._new_id)
880
trans_ids.update(self._removed_contents)
881
trans_ids.update(self._new_contents)
882
trans_ids.update(self._new_executability)
883
trans_ids.update(self._new_name)
884
trans_ids.update(self._new_parent)
887
def _get_file_id_maps(self):
888
"""Return mapping of file_ids to trans_ids in the to and from states"""
889
trans_ids = self._affected_ids()
892
# Build up two dicts: trans_ids associated with file ids in the
893
# FROM state, vs the TO state.
894
for trans_id in trans_ids:
895
from_file_id = self.tree_file_id(trans_id)
896
if from_file_id is not None:
897
from_trans_ids[from_file_id] = trans_id
898
to_file_id = self.final_file_id(trans_id)
899
if to_file_id is not None:
900
to_trans_ids[to_file_id] = trans_id
901
return from_trans_ids, to_trans_ids
903
def _from_file_data(self, from_trans_id, from_versioned, from_path):
904
"""Get data about a file in the from (tree) state
906
Return a (name, parent, kind, executable) tuple
908
from_path = self._tree_id_paths.get(from_trans_id)
910
# get data from working tree if versioned
911
from_entry = next(self._tree.iter_entries_by_dir(
912
specific_files=[from_path]))[1]
913
from_name = from_entry.name
914
from_parent = from_entry.parent_id
917
if from_path is None:
918
# File does not exist in FROM state
922
# File exists, but is not versioned. Have to use path-
924
from_name = os.path.basename(from_path)
925
tree_parent = self.get_tree_parent(from_trans_id)
926
from_parent = self.tree_file_id(tree_parent)
927
if from_path is not None:
928
from_kind, from_executable, from_stats = \
929
self._tree._comparison_data(from_entry, from_path)
932
from_executable = False
933
return from_name, from_parent, from_kind, from_executable
935
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
936
"""Get data about a file in the to (target) state
938
Return a (name, parent, kind, executable) tuple
940
to_name = self.final_name(to_trans_id)
941
to_kind = self.final_kind(to_trans_id)
942
to_parent = self.final_file_id(self.final_parent(to_trans_id))
943
if to_trans_id in self._new_executability:
944
to_executable = self._new_executability[to_trans_id]
945
elif to_trans_id == from_trans_id:
946
to_executable = from_executable
948
to_executable = False
949
return to_name, to_parent, to_kind, to_executable
951
def iter_changes(self):
    """Produce output in the same format as Tree.iter_changes.

    Will produce nonsensical results if invoked while inventory/filesystem
    conflicts (as reported by TreeTransform.find_conflicts()) are present.

    This reads the Transform, but only reproduces changes involving a
    file_id.  Files that are not versioned in either of the FROM or TO
    states are not reflected.
    """
    final_paths = FinalPaths(self)
    from_trans_ids, to_trans_ids = self._get_file_id_maps()
    results = []
    # Now iterate through all active file_ids
    for file_id in set(from_trans_ids).union(to_trans_ids):
        modified = False
        from_trans_id = from_trans_ids.get(file_id)
        # find file ids, and determine versioning state
        if from_trans_id is None:
            from_versioned = False
            from_trans_id = to_trans_ids[file_id]
        else:
            from_versioned = True
        to_trans_id = to_trans_ids.get(file_id)
        if to_trans_id is None:
            to_versioned = False
            to_trans_id = from_trans_id
        else:
            to_versioned = True

        if not from_versioned:
            from_path = None
        else:
            from_path = self._tree_id_paths.get(from_trans_id)
        if not to_versioned:
            to_path = None
        else:
            to_path = final_paths.get_path(to_trans_id)

        from_name, from_parent, from_kind, from_executable = \
            self._from_file_data(from_trans_id, from_versioned, from_path)

        to_name, to_parent, to_kind, to_executable = \
            self._to_file_data(to_trans_id, from_trans_id, from_executable)

        if from_kind != to_kind:
            modified = True
        elif to_kind in ('file', 'symlink') and (
                to_trans_id != from_trans_id
                or to_trans_id in self._new_contents):
            modified = True
        # Skip entries that are entirely unchanged.
        if (not modified and from_versioned == to_versioned
                and from_parent == to_parent and from_name == to_name
                and from_executable == to_executable):
            continue
        results.append(
            TreeChange(
                file_id, (from_path, to_path), modified,
                (from_versioned, to_versioned),
                (from_parent, to_parent),
                (from_name, to_name),
                (from_kind, to_kind),
                (from_executable, to_executable)))

    def path_key(c):
        # Sort by (old path, new path), treating missing paths as ''.
        return (c.path[0] or '', c.path[1] or '')
    return iter(sorted(results, key=path_key))
1019
def get_preview_tree(self):
    """Return a tree representing the result of the transform.

    The tree is a snapshot, and altering the TreeTransform will invalidate
    it.
    """
    return _PreviewTree(self)
1027
def commit(self, branch, message, merge_parents=None, strict=False,
           timestamp=None, timezone=None, committer=None, authors=None,
           revprops=None, revision_id=None):
    """Commit the result of this TreeTransform to a branch.

    :param branch: The branch to commit to.
    :param message: The message to attach to the commit.
    :param merge_parents: Additional parent revision-ids specified by
        pending merges.
    :param strict: If True, abort the commit if there are unversioned
        files.
    :param timestamp: if not None, seconds-since-epoch for the time and
        date. (May be a float.)
    :param timezone: Optional timezone for timestamp, as an offset in
        seconds.
    :param committer: Optional committer in email-id format.
        (e.g. "J Random Hacker <jrandom@example.com>")
    :param authors: Optional list of authors in email-id format.
    :param revprops: Optional dictionary of revision properties.
    :param revision_id: Optional revision id. (Specifying a revision-id
        may reduce performance for some non-native formats.)
    :return: The revision_id of the revision committed.
    """
    self._check_malformed()
    if strict:
        # Refuse to commit anything that has contents but no file id.
        unversioned = set(self._new_contents).difference(set(self._new_id))
        for trans_id in unversioned:
            if self.final_file_id(trans_id) is None:
                raise errors.StrictCommitFailed()

    revno, last_rev_id = branch.last_revision_info()
    if last_rev_id == _mod_revision.NULL_REVISION:
        if merge_parents is not None:
            raise ValueError('Cannot supply merge parents for first'
                             ' commit.')
        parent_ids = []
    else:
        parent_ids = [last_rev_id]
        if merge_parents is not None:
            parent_ids.extend(merge_parents)
    if self._tree.get_revision_id() != last_rev_id:
        raise ValueError('TreeTransform not based on branch basis: %s' %
                         self._tree.get_revision_id().decode('utf-8'))
    revprops = commit.Commit.update_revprops(revprops, branch, authors)
    builder = branch.get_commit_builder(parent_ids,
                                        timestamp=timestamp,
                                        timezone=timezone,
                                        committer=committer,
                                        revprops=revprops,
                                        revision_id=revision_id)
    preview = self.get_preview_tree()
    list(builder.record_iter_changes(preview, last_rev_id,
                                     self.iter_changes()))
    builder.finish_inventory()
    revision_id = builder.commit(message)
    branch.set_last_revision_info(revno + 1, revision_id)
    return revision_id
1085
def _text_parent(self, trans_id):
1086
path = self.tree_path(trans_id)
1088
if path is None or self._tree.kind(path) != 'file':
1090
except errors.NoSuchFile:
1094
def _get_parents_texts(self, trans_id):
1095
"""Get texts for compression parents of this file."""
1096
path = self._text_parent(trans_id)
1099
return (self._tree.get_file_text(path),)
1101
def _get_parents_lines(self, trans_id):
1102
"""Get lines for compression parents of this file."""
1103
path = self._text_parent(trans_id)
1106
return (self._tree.get_file_lines(path),)
1108
def serialize(self, serializer):
    """Serialize this TreeTransform.

    :param serializer: A Serialiser like pack.ContainerSerializer.
    """
    # All keys/values are encoded to bytes so they survive bencoding.
    new_name = {k.encode('utf-8'): v.encode('utf-8')
                for k, v in viewitems(self._new_name)}
    new_parent = {k.encode('utf-8'): v.encode('utf-8')
                  for k, v in viewitems(self._new_parent)}
    new_id = {k.encode('utf-8'): v
              for k, v in viewitems(self._new_id)}
    new_executability = {k.encode('utf-8'): int(v)
                         for k, v in viewitems(self._new_executability)}
    tree_path_ids = {k.encode('utf-8'): v.encode('utf-8')
                     for k, v in viewitems(self._tree_path_ids)}
    non_present_ids = {k: v.encode('utf-8')
                       for k, v in viewitems(self._non_present_ids)}
    removed_contents = [trans_id.encode('utf-8')
                        for trans_id in self._removed_contents]
    removed_id = [trans_id.encode('utf-8')
                  for trans_id in self._removed_id]
    attribs = {
        b'_id_number': self._id_number,
        b'_new_name': new_name,
        b'_new_parent': new_parent,
        b'_new_executability': new_executability,
        b'_new_id': new_id,
        b'_tree_path_ids': tree_path_ids,
        b'_removed_id': removed_id,
        b'_removed_contents': removed_contents,
        b'_non_present_ids': non_present_ids,
        }
    yield serializer.bytes_record(bencode.bencode(attribs),
                                  ((b'attribs',),))
    # One record per new-content item, sorted for determinism.
    for trans_id, kind in sorted(viewitems(self._new_contents)):
        if kind == 'file':
            with open(self._limbo_name(trans_id), 'rb') as cur_file:
                lines = cur_file.readlines()
            parents = self._get_parents_lines(trans_id)
            mpdiff = multiparent.MultiParent.from_lines(lines, parents)
            content = b''.join(mpdiff.to_patch())
        if kind == 'directory':
            content = b''
        if kind == 'symlink':
            content = self._read_symlink_target(trans_id)
            if not isinstance(content, bytes):
                content = content.encode('utf-8')
        yield serializer.bytes_record(
            content, ((trans_id.encode('utf-8'), kind.encode('ascii')),))
1158
def deserialize(self, records):
    """Deserialize a stored TreeTransform.

    :param records: An iterable of (names, content) tuples, as per
        pack.ContainerPushParser.
    """
    # First record holds the bencoded attribute tables.
    names, content = next(records)
    attribs = bencode.bdecode(content)
    self._id_number = attribs[b'_id_number']
    self._new_name = {k.decode('utf-8'): v.decode('utf-8')
                      for k, v in viewitems(attribs[b'_new_name'])}
    self._new_parent = {k.decode('utf-8'): v.decode('utf-8')
                        for k, v in viewitems(attribs[b'_new_parent'])}
    self._new_executability = {
        k.decode('utf-8'): bool(v)
        for k, v in viewitems(attribs[b'_new_executability'])}
    self._new_id = {k.decode('utf-8'): v
                    for k, v in viewitems(attribs[b'_new_id'])}
    self._r_new_id = {v: k for k, v in viewitems(self._new_id)}
    self._tree_path_ids = {}
    self._tree_id_paths = {}
    for bytepath, trans_id in viewitems(attribs[b'_tree_path_ids']):
        path = bytepath.decode('utf-8')
        trans_id = trans_id.decode('utf-8')
        self._tree_path_ids[path] = trans_id
        self._tree_id_paths[trans_id] = path
    self._removed_id = {trans_id.decode('utf-8')
                        for trans_id in attribs[b'_removed_id']}
    self._removed_contents = set(
        trans_id.decode('utf-8')
        for trans_id in attribs[b'_removed_contents'])
    self._non_present_ids = {
        k: v.decode('utf-8')
        for k, v in viewitems(attribs[b'_non_present_ids'])}
    # Remaining records carry the new file/directory/symlink contents.
    for ((trans_id, kind),), content in records:
        trans_id = trans_id.decode('utf-8')
        kind = kind.decode('ascii')
        if kind == 'file':
            mpdiff = multiparent.MultiParent.from_patch(content)
            lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
            self.create_file(lines, trans_id)
        if kind == 'directory':
            self.create_directory(trans_id)
        if kind == 'symlink':
            self.create_symlink(content.decode('utf-8'), trans_id)
1204
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
    """Schedule creation of a new file.

    :seealso: new_file.

    :param contents: an iterator of strings, all of which will be written
        to the target destination.
    :param trans_id: TreeTransform handle
    :param mode_id: If not None, force the mode of the target file to match
        the mode of the object referenced by mode_id.
        Otherwise, we will try to preserve mode bits of an existing file.
    :param sha1: If the sha1 of this content is already known, pass it in.
        We can use it to prevent future sha1 computations.
    """
    raise NotImplementedError(self.create_file)
1220
def create_directory(self, trans_id):
    """Schedule creation of a new directory.

    See also new_directory.
    """
    raise NotImplementedError(self.create_directory)
1227
def create_symlink(self, target, trans_id):
    """Schedule creation of a new symbolic link.

    target is a bytestring.
    See also new_symlink.
    """
    raise NotImplementedError(self.create_symlink)
1235
def create_hardlink(self, path, trans_id):
    """Schedule creation of a hard link"""
    raise NotImplementedError(self.create_hardlink)
1239
def cancel_creation(self, trans_id):
    """Cancel the creation of new file contents."""
    raise NotImplementedError(self.cancel_creation)
1244
class DiskTreeTransform(TreeTransformBase):
1245
"""Tree transform storing its contents on disk."""
1247
def __init__(self, tree, limbodir, pb=None, case_sensitive=True):
    """Constructor.

    :param tree: The tree that will be transformed, but not necessarily
        the output tree.
    :param limbodir: A directory where new files can be stored until
        they are installed in their proper places
    :param pb: ignored
    :param case_sensitive: If True, the target of the transform is
        case sensitive, not just case preserving.
    """
    TreeTransformBase.__init__(self, tree, pb, case_sensitive)
    self._limbodir = limbodir
    self._deletiondir = None
    # A mapping of transform ids to their limbo filename
    self._limbo_files = {}
    self._possibly_stale_limbo_files = set()
    # A mapping of transform ids to a set of the transform ids of children
    # that their limbo directory has
    self._limbo_children = {}
    # Map transform ids to maps of child filename to child transform id
    self._limbo_children_names = {}
    # List of transform ids that need to be renamed from limbo into place
    self._needs_rename = set()
    self._creation_mtime = None
    self._create_symlinks = osutils.supports_symlinks(self._limbodir)
1274
"""Release the working tree lock, if held, clean up limbo dir.
1276
This is required if apply has not been invoked, but can be invoked
1279
if self._tree is None:
1282
limbo_paths = list(viewvalues(self._limbo_files))
1283
limbo_paths.extend(self._possibly_stale_limbo_files)
1284
limbo_paths.sort(reverse=True)
1285
for path in limbo_paths:
1288
except OSError as e:
1289
if e.errno != errno.ENOENT:
1291
# XXX: warn? perhaps we just got interrupted at an
1292
# inconvenient moment, but perhaps files are disappearing
1295
delete_any(self._limbodir)
1297
# We don't especially care *why* the dir is immortal.
1298
raise ImmortalLimbo(self._limbodir)
1300
if self._deletiondir is not None:
1301
delete_any(self._deletiondir)
1303
raise errors.ImmortalPendingDeletion(self._deletiondir)
1305
TreeTransformBase.finalize(self)
1307
def _limbo_supports_executable(self):
    """Report whether the limbo directory can carry the executable bit."""
    return osutils.supports_executable(self._limbodir)
1311
def _limbo_name(self, trans_id):
1312
"""Generate the limbo name of a file"""
1313
limbo_name = self._limbo_files.get(trans_id)
1314
if limbo_name is None:
1315
limbo_name = self._generate_limbo_path(trans_id)
1316
self._limbo_files[trans_id] = limbo_name
1319
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the trans_id as the relative path.

    This is suitable as a fallback, and when the transform should not be
    sensitive to the path encoding of the limbo directory.
    """
    # trans_id-based paths never match the final name, so a rename into
    # place will be needed at apply time.
    self._needs_rename.add(trans_id)
    return pathjoin(self._limbodir, trans_id)
1328
def adjust_path(self, name, parent, trans_id):
    """Change the path that is assigned to a transaction id.

    Also fixes up any limbo bookkeeping that depended on the old
    name/parent.
    """
    previous_parent = self._new_parent.get(trans_id)
    previous_name = self._new_name.get(trans_id)
    TreeTransformBase.adjust_path(self, name, parent, trans_id)
    if (trans_id in self._limbo_files
            and trans_id not in self._needs_rename):
        # The file already sits in limbo under its (now wrong) final
        # name, so move it and update the child-tracking maps.
        self._rename_in_limbo([trans_id])
        if previous_parent != parent:
            self._limbo_children[previous_parent].remove(trans_id)
        if previous_parent != parent or previous_name != name:
            del self._limbo_children_names[previous_parent][previous_name]
1340
def _rename_in_limbo(self, trans_ids):
    """Fix limbo names so that the right final path is produced.

    This means we outsmarted ourselves-- we tried to avoid renaming
    these files later by creating them with their final names in their
    final parents.  But now the previous name or parent is no longer
    suitable, so we have to rename them.

    Even for trans_ids that have no new contents, we must remove their
    entries from _limbo_files, because they are now stale.
    """
    for trans_id in trans_ids:
        old_path = self._limbo_files[trans_id]
        self._possibly_stale_limbo_files.add(old_path)
        del self._limbo_files[trans_id]
        if trans_id not in self._new_contents:
            # Nothing on disk for this id: dropping the stale map entry
            # is all that's needed.
            continue
        new_path = self._limbo_name(trans_id)
        os.rename(old_path, new_path)
        self._possibly_stale_limbo_files.remove(old_path)
        # Rewrite cached limbo paths of everything stored below this
        # directory so they reflect the new prefix.
        for descendant in self._limbo_descendants(trans_id):
            desc_path = self._limbo_files[descendant]
            desc_path = new_path + desc_path[len(old_path):]
            self._limbo_files[descendant] = desc_path
1365
def _limbo_descendants(self, trans_id):
1366
"""Return the set of trans_ids whose limbo paths descend from this."""
1367
descendants = set(self._limbo_children.get(trans_id, []))
1368
for descendant in list(descendants):
1369
descendants.update(self._limbo_descendants(descendant))
1372
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
    """Schedule creation of a new file.

    :seealso: new_file.

    :param contents: an iterator of strings, all of which will be written
        to the target destination.
    :param trans_id: TreeTransform handle
    :param mode_id: If not None, force the mode of the target file to match
        the mode of the object referenced by mode_id.
        Otherwise, we will try to preserve mode bits of an existing file.
    :param sha1: If the sha1 of this content is already known, pass it in.
        We can use it to prevent future sha1 computations.
    """
    name = self._limbo_name(trans_id)
    with open(name, 'wb') as f:
        unique_add(self._new_contents, trans_id, 'file')
        f.writelines(contents)
    self._set_mtime(name)
    self._set_mode(trans_id, mode_id, S_ISREG)
    # It is unfortunate we have to use lstat instead of fstat, but we just
    # used utime and chmod on the file, so we need the accurate final
    # value.
    if sha1 is not None:
        self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1398
def _read_symlink_target(self, trans_id):
    """Return the target of the symlink held in limbo for this id."""
    return os.readlink(self._limbo_name(trans_id))
1401
def _set_mtime(self, path):
1402
"""All files that are created get the same mtime.
1404
This time is set by the first object to be created.
1406
if self._creation_mtime is None:
1407
self._creation_mtime = time.time()
1408
os.utime(path, (self._creation_mtime, self._creation_mtime))
1410
def create_hardlink(self, path, trans_id):
    """Schedule creation of a hard link"""
    name = self._limbo_name(trans_id)
    try:
        os.link(path, name)
    except OSError as e:
        if e.errno != errno.EPERM:
            raise
        raise errors.HardLinkNotSupported(path)
    try:
        unique_add(self._new_contents, trans_id, 'file')
    except BaseException:
        # Clean up the file, it never got registered so
        # TreeTransform.finalize() won't clean it up.
        os.unlink(name)
        raise
1427
def create_directory(self, trans_id):
    """Schedule creation of a new directory.

    See also new_directory.
    """
    os.mkdir(self._limbo_name(trans_id))
    unique_add(self._new_contents, trans_id, 'directory')
1435
def create_symlink(self, target, trans_id):
    """Schedule creation of a new symbolic link.

    target is a bytestring.
    See also new_symlink.
    """
    if self._create_symlinks:
        os.symlink(target, self._limbo_name(trans_id))
    else:
        try:
            path = FinalPaths(self).get_path(trans_id)
        except KeyError:
            path = None
        trace.warning(
            'Unable to create symlink "%s" on this filesystem.' % (path,))
    # We add symlink to _new_contents even if they are unsupported
    # and not created. These entries are subsequently used to avoid
    # conflicts on platforms that don't support symlink
    unique_add(self._new_contents, trans_id, 'symlink')
1455
def cancel_creation(self, trans_id):
    """Cancel the creation of new file contents."""
    del self._new_contents[trans_id]
    if trans_id in self._observed_sha1s:
        del self._observed_sha1s[trans_id]
    children = self._limbo_children.get(trans_id)
    # if this is a limbo directory with children, move them before removing
    # the directory
    if children is not None:
        self._rename_in_limbo(children)
        del self._limbo_children[trans_id]
        del self._limbo_children_names[trans_id]
    delete_any(self._limbo_name(trans_id))
1469
def new_orphan(self, trans_id, parent_id):
    """Orphan the item according to the configured orphan policy."""
    stack = self._tree.get_config_stack()
    policy = stack.get('transform.orphan_policy')
    policy(self, trans_id, parent_id)
1475
class OrphaningError(errors.BzrError):
    """Base error raised while orphaning an item during a transform."""

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
1487
class OrphaningForbidden(OrphaningError):
    """Raised when the active orphan policy forbids creating orphans."""

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1496
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `brz-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'brz-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1522
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1530
# Registry of orphan-handling policies; 'conflict' is the safe default.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    u'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    u'move', move_orphan,
    'Move orphans into the brz-orphans directory.')
orphaning_registry._set_default_key(u'conflict')


opt_transform_orphan = _mod_config.RegistryOption(
    'transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
1546
class TreeTransform(DiskTreeTransform):
1547
"""Represent a tree transformation.
1549
This object is designed to support incremental generation of the transform,
1552
However, it gives optimum performance when parent directories are created
1553
before their contents. The transform is then able to put child files
1554
directly in their parent directory, avoiding later renames.
1556
It is easy to produce malformed transforms, but they are generally
1557
harmless. Attempting to apply a malformed transform will cause an
1558
exception to be raised before any modifications are made to the tree.
1560
Many kinds of malformed transforms can be corrected with the
1561
resolve_conflicts function. The remaining ones indicate programming error,
1562
such as trying to create a file with no path.
1564
Two sets of file creation methods are supplied. Convenience methods are:
1569
These are composed of the low-level methods:
1571
* create_file or create_directory or create_symlink
1575
Transform/Transaction ids
1576
-------------------------
1577
trans_ids are temporary ids assigned to all files involved in a transform.
1578
It's possible, even common, that not all files in the Tree have trans_ids.
1580
trans_ids are used because filenames and file_ids are not good enough
1581
identifiers; filenames change, and not all files have file_ids. File-ids
1582
are also associated with trans-ids, so that moving a file moves its
1585
trans_ids are only valid for the TreeTransform that generated them.
1589
Limbo is a temporary directory use to hold new versions of files.
1590
Files are added to limbo by create_file, create_directory, create_symlink,
1591
and their convenience variants (new_*). Files may be removed from limbo
1592
using cancel_creation. Files are renamed from limbo into their final
1593
location as part of TreeTransform.apply
1595
Limbo must be cleaned up, by either calling TreeTransform.apply or
1596
calling TreeTransform.finalize.
1598
Files are placed into limbo inside their parent directories, where
1599
possible. This reduces subsequent renames, and makes operations involving
1600
lots of files faster. This optimization is only possible if the parent
1601
directory is created *before* creating any of its children, so avoid
1602
creating children before parents, where possible.
1606
This temporary directory is used by _FileMover for storing files that are
1607
about to be deleted. In case of rollback, the files will be restored.
1608
FileMover does not delete files until it is sure that a rollback will not
1612
def __init__(self, tree, pb=None):
    """Note: a tree_write lock is taken on the tree.

    Use TreeTransform.finalize() to release the lock (can be omitted if
    TreeTransform.apply() called).
    """
    tree.lock_tree_write()
    try:
        limbodir = urlutils.local_path_from_url(
            tree._transport.abspath('limbo'))
        osutils.ensure_empty_directory_exists(
            limbodir,
            errors.ExistingLimbo)
        deletiondir = urlutils.local_path_from_url(
            tree._transport.abspath('pending-deletion'))
        osutils.ensure_empty_directory_exists(
            deletiondir,
            errors.ExistingPendingDeletion)
    except BaseException:
        # Don't leave the tree locked if setup failed.
        tree.unlock()
        raise

    # Cache of realpath results, to speed up canonical_path
    self._realpaths = {}
    # Cache of relpath results, to speed up canonical_path
    self._relpaths = {}
    DiskTreeTransform.__init__(self, tree, limbodir, pb,
                               tree.case_sensitive)
    self._deletiondir = deletiondir
1642
def canonical_path(self, path):
    """Get the canonical tree-relative path"""
    # don't follow final symlinks
    abs = self._tree.abspath(path)
    if abs in self._relpaths:
        return self._relpaths[abs]
    dirname, basename = os.path.split(abs)
    if dirname not in self._realpaths:
        self._realpaths[dirname] = os.path.realpath(dirname)
    dirname = self._realpaths[dirname]
    abs = pathjoin(dirname, basename)
    if dirname in self._relpaths:
        relpath = pathjoin(self._relpaths[dirname], basename)
        relpath = relpath.rstrip('/\\')
    else:
        relpath = self._tree.relpath(abs)
    self._relpaths[abs] = relpath
    return relpath
1661
def tree_kind(self, trans_id):
    """Determine the file kind in the working tree.

    :returns: The file kind or None if the file does not exist
    """
    path = self._tree_id_paths.get(trans_id)
    if path is None:
        return None
    try:
        return file_kind(self._tree.abspath(path))
    except errors.NoSuchFile:
        return None
1674
def _set_mode(self, trans_id, mode_id, typefunc):
    """Set the mode of new file contents.

    The mode_id is the existing file to get the mode from (often the same
    as trans_id).  The operation is only performed if there's a mode match
    according to typefunc.
    """
    if mode_id is None:
        mode_id = trans_id
    try:
        old_path = self._tree_id_paths[mode_id]
    except KeyError:
        # No existing file to copy mode bits from.
        return
    try:
        mode = os.stat(self._tree.abspath(old_path)).st_mode
    except OSError as e:
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            # Either old_path doesn't exist, or the parent of the
            # target is not a directory (but will be one eventually)
            # Either way, we know it doesn't exist *right now*
            # See also bug #248448
            return
        else:
            raise
    if typefunc(mode):
        osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1700
def iter_tree_children(self, parent_id):
    """Iterate through the entry's tree children, if any"""
    try:
        path = self._tree_id_paths[parent_id]
    except KeyError:
        # Not a tree path: nothing to iterate.
        return
    try:
        children = os.listdir(self._tree.abspath(path))
    except OSError as e:
        if not (osutils._is_error_enotdir(e) or
                e.errno in (errno.ENOENT, errno.ESRCH)):
            raise
        return

    for child in children:
        childpath = joinpath(path, child)
        if self._tree.is_control_filename(childpath):
            continue
        yield self.trans_id_tree_path(childpath)
1720
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the final path if possible.

    This optimizes the performance of applying the tree transform by
    avoiding renames.  These renames can be avoided only when the parent
    directory is already scheduled for creation.

    If the final path cannot be used, falls back to using the trans_id as
    the relpath.
    """
    parent = self._new_parent.get(trans_id)
    # if the parent directory is already in limbo (e.g. when building a
    # tree), choose a limbo name inside the parent, to reduce further
    # renames.
    use_direct_path = False
    if self._new_contents.get(parent) == 'directory':
        filename = self._new_name.get(trans_id)
        if filename is not None:
            if parent not in self._limbo_children:
                self._limbo_children[parent] = set()
                self._limbo_children_names[parent] = {}
                use_direct_path = True
            # the direct path can only be used if no other file has
            # already taken this pathname, i.e. if the name is unused, or
            # if it is already associated with this trans_id.
            elif self._case_sensitive_target:
                if (self._limbo_children_names[parent].get(filename)
                        in (trans_id, None)):
                    use_direct_path = True
            else:
                # Case-insensitive target: any case-folded match blocks
                # the direct path.
                for l_filename, l_trans_id in viewitems(
                        self._limbo_children_names[parent]):
                    if l_trans_id == trans_id:
                        continue
                    if l_filename.lower() == filename.lower():
                        break
                else:
                    use_direct_path = True

    if not use_direct_path:
        return DiskTreeTransform._generate_limbo_path(self, trans_id)

    limbo_name = pathjoin(self._limbo_files[parent], filename)
    self._limbo_children[parent].add(trans_id)
    self._limbo_children_names[parent][filename] = trans_id
    return limbo_name
1767
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1768
"""Apply all changes to the inventory and filesystem.
1770
If filesystem or inventory conflicts are present, MalformedTransform
1773
If apply succeeds, finalize is not necessary.
1775
:param no_conflicts: if True, the caller guarantees there are no
1776
conflicts, so no check is made.
1777
:param precomputed_delta: An inventory delta to use instead of
1779
:param _mover: Supply an alternate FileMover, for testing
1781
for hook in MutableTree.hooks['pre_transform']:
1782
hook(self._tree, self)
1783
if not no_conflicts:
1784
self._check_malformed()
1785
with ui.ui_factory.nested_progress_bar() as child_pb:
1786
if precomputed_delta is None:
1787
child_pb.update(gettext('Apply phase'), 0, 2)
1788
inventory_delta = self._generate_inventory_delta()
1791
inventory_delta = precomputed_delta
1794
mover = _FileMover()
1798
child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1799
self._apply_removals(mover)
1800
child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1801
modified_paths = self._apply_insertions(mover)
1802
except BaseException:
1806
mover.apply_deletions()
1807
if self.final_file_id(self.root) is None:
1808
inventory_delta = [e for e in inventory_delta if e[0] != '']
1809
self._tree.apply_inventory_delta(inventory_delta)
1810
self._apply_observed_sha1s()
1813
return _TransformResults(modified_paths, self.rename_count)
1815
def _generate_inventory_delta(self):
1816
"""Generate an inventory delta for the current transform."""
1817
inventory_delta = []
1818
new_paths = self._inventory_altered()
1819
total_entries = len(new_paths) + len(self._removed_id)
1820
with ui.ui_factory.nested_progress_bar() as child_pb:
1821
for num, trans_id in enumerate(self._removed_id):
1823
child_pb.update(gettext('removing file'),
1825
if trans_id == self._new_root:
1826
file_id = self._tree.get_root_id()
1828
file_id = self.tree_file_id(trans_id)
1829
# File-id isn't really being deleted, just moved
1830
if file_id in self._r_new_id:
1832
path = self._tree_id_paths[trans_id]
1833
inventory_delta.append((path, None, file_id, None))
1834
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1836
for num, (path, trans_id) in enumerate(new_paths):
1838
child_pb.update(gettext('adding file'),
1839
num + len(self._removed_id), total_entries)
1840
file_id = new_path_file_ids[trans_id]
1843
kind = self.final_kind(trans_id)
1845
kind = self._tree.stored_kind(self._tree.id2path(file_id))
1846
parent_trans_id = self.final_parent(trans_id)
1847
parent_file_id = new_path_file_ids.get(parent_trans_id)
1848
if parent_file_id is None:
1849
parent_file_id = self.final_file_id(parent_trans_id)
1850
if trans_id in self._new_reference_revision:
1851
new_entry = inventory.TreeReference(
1853
self._new_name[trans_id],
1854
self.final_file_id(self._new_parent[trans_id]),
1855
None, self._new_reference_revision[trans_id])
1857
new_entry = inventory.make_entry(kind,
1858
self.final_name(trans_id),
1859
parent_file_id, file_id)
1861
old_path = self._tree.id2path(new_entry.file_id)
1862
except errors.NoSuchId:
1864
new_executability = self._new_executability.get(trans_id)
1865
if new_executability is not None:
1866
new_entry.executable = new_executability
1867
inventory_delta.append(
1868
(old_path, path, new_entry.file_id, new_entry))
1869
return inventory_delta
1871
def _apply_removals(self, mover):
1872
"""Perform tree operations that remove directory/inventory names.
1874
That is, delete files that are to be deleted, and put any files that
1875
need renaming into limbo. This must be done in strict child-to-parent
1878
If inventory_delta is None, no inventory delta generation is performed.
1880
tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
1881
with ui.ui_factory.nested_progress_bar() as child_pb:
1882
for num, (path, trans_id) in enumerate(tree_paths):
1883
# do not attempt to move root into a subdirectory of itself.
1886
child_pb.update(gettext('removing file'), num, len(tree_paths))
1887
full_path = self._tree.abspath(path)
1888
if trans_id in self._removed_contents:
1889
delete_path = os.path.join(self._deletiondir, trans_id)
1890
mover.pre_delete(full_path, delete_path)
1891
elif (trans_id in self._new_name or
1892
trans_id in self._new_parent):
1894
mover.rename(full_path, self._limbo_name(trans_id))
1895
except errors.TransformRenameFailed as e:
1896
if e.errno != errno.ENOENT:
1899
self.rename_count += 1
1901
def _apply_insertions(self, mover):
1902
"""Perform tree operations that insert directory/inventory names.
1904
That is, create any files that need to be created, and restore from
1905
limbo any files that needed renaming. This must be done in strict
1906
parent-to-child order.
1908
If inventory_delta is None, no inventory delta is calculated, and
1909
no list of modified paths is returned.
1911
new_paths = self.new_paths(filesystem_only=True)
1913
with ui.ui_factory.nested_progress_bar() as child_pb:
1914
for num, (path, trans_id) in enumerate(new_paths):
1916
child_pb.update(gettext('adding file'),
1917
num, len(new_paths))
1918
full_path = self._tree.abspath(path)
1919
if trans_id in self._needs_rename:
1921
mover.rename(self._limbo_name(trans_id), full_path)
1922
except errors.TransformRenameFailed as e:
1923
# We may be renaming a dangling inventory id
1924
if e.errno != errno.ENOENT:
1927
self.rename_count += 1
1928
# TODO: if trans_id in self._observed_sha1s, we should
1929
# re-stat the final target, since ctime will be
1930
# updated by the change.
1931
if (trans_id in self._new_contents
1932
or self.path_changed(trans_id)):
1933
if trans_id in self._new_contents:
1934
modified_paths.append(full_path)
1935
if trans_id in self._new_executability:
1936
self._set_executability(path, trans_id)
1937
if trans_id in self._observed_sha1s:
1938
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1939
st = osutils.lstat(full_path)
1940
self._observed_sha1s[trans_id] = (o_sha1, st)
1941
for path, trans_id in new_paths:
1942
# new_paths includes stuff like workingtree conflicts. Only the
1943
# stuff in new_contents actually comes from limbo.
1944
if trans_id in self._limbo_files:
1945
del self._limbo_files[trans_id]
1946
self._new_contents.clear()
1947
return modified_paths
1949
def _apply_observed_sha1s(self):
1950
"""After we have finished renaming everything, update observed sha1s
1952
This has to be done after self._tree.apply_inventory_delta, otherwise
1953
it doesn't know anything about the files we are updating. Also, we want
1954
to do this as late as possible, so that most entries end up cached.
1956
# TODO: this doesn't update the stat information for directories. So
1957
# the first 'bzr status' will still need to rewrite
1958
# .bzr/checkout/dirstate. However, we at least don't need to
1959
# re-read all of the files.
1960
# TODO: If the operation took a while, we could do a time.sleep(3) here
1961
# to allow the clock to tick over and ensure we won't have any
1962
# problems. (we could observe start time, and finish time, and if
1963
# it is less than eg 10% overhead, add a sleep call.)
1964
paths = FinalPaths(self)
1965
for trans_id, observed in viewitems(self._observed_sha1s):
1966
path = paths.get_path(trans_id)
1967
self._tree._observed_sha1(path, observed)
1970
class TransformPreview(DiskTreeTransform):
1971
"""A TreeTransform for generating preview trees.
1973
Unlike TreeTransform, this version works when the input tree is a
1974
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1975
unversioned files in the input tree.
1978
def __init__(self, tree, pb=None, case_sensitive=True):
1980
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1981
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1983
def canonical_path(self, path):
1986
def tree_kind(self, trans_id):
1987
path = self._tree_id_paths.get(trans_id)
1990
kind = self._tree.path_content_summary(path)[0]
1991
if kind == 'missing':
1995
def _set_mode(self, trans_id, mode_id, typefunc):
1996
"""Set the mode of new file contents.
1997
The mode_id is the existing file to get the mode from (often the same
1998
as trans_id). The operation is only performed if there's a mode match
1999
according to typefunc.
2001
# is it ok to ignore this? probably
2004
def iter_tree_children(self, parent_id):
2005
"""Iterate through the entry's tree children, if any"""
2007
path = self._tree_id_paths[parent_id]
2011
entry = next(self._tree.iter_entries_by_dir(
2012
specific_files=[path]))[1]
2013
except StopIteration:
2015
children = getattr(entry, 'children', {})
2016
for child in children:
2017
childpath = joinpath(path, child)
2018
yield self.trans_id_tree_path(childpath)
2020
def new_orphan(self, trans_id, parent_id):
2021
raise NotImplementedError(self.new_orphan)
2024
class _PreviewTree(inventorytree.InventoryTree):
2025
"""Partial implementation of Tree to support show_diff_trees"""
2027
def __init__(self, transform):
2028
self._transform = transform
2029
self._final_paths = FinalPaths(transform)
2030
self.__by_parent = None
2031
self._parent_ids = []
2032
self._all_children_cache = {}
2033
self._path2trans_id_cache = {}
2034
self._final_name_cache = {}
2035
self._iter_changes_cache = dict((c.file_id, c) for c in
2036
self._transform.iter_changes())
2038
def _content_change(self, file_id):
2039
"""Return True if the content of this file changed"""
2040
changes = self._iter_changes_cache.get(file_id)
2041
# changes[2] is true if the file content changed. See
2042
# InterTree.iter_changes.
2043
return (changes is not None and changes[2])
2045
def _get_repository(self):
2046
repo = getattr(self._transform._tree, '_repository', None)
2048
repo = self._transform._tree.branch.repository
2051
def _iter_parent_trees(self):
2052
for revision_id in self.get_parent_ids():
2054
yield self.revision_tree(revision_id)
2055
except errors.NoSuchRevisionInTree:
2056
yield self._get_repository().revision_tree(revision_id)
2058
def _get_file_revision(self, path, file_id, vf, tree_revision):
2060
(file_id, t.get_file_revision(t.id2path(file_id)))
2061
for t in self._iter_parent_trees()]
2062
vf.add_lines((file_id, tree_revision), parent_keys,
2063
self.get_file_lines(path))
2064
repo = self._get_repository()
2065
base_vf = repo.texts
2066
if base_vf not in vf.fallback_versionedfiles:
2067
vf.fallback_versionedfiles.append(base_vf)
2068
return tree_revision
2070
def _stat_limbo_file(self, trans_id):
2071
name = self._transform._limbo_name(trans_id)
2072
return os.lstat(name)
2075
def _by_parent(self):
2076
if self.__by_parent is None:
2077
self.__by_parent = self._transform.by_parent()
2078
return self.__by_parent
2080
def _comparison_data(self, entry, path):
2081
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2082
if kind == 'missing':
2086
file_id = self._transform.final_file_id(self._path2trans_id(path))
2087
executable = self.is_executable(path)
2088
return kind, executable, None
2090
def is_locked(self):
2093
def lock_read(self):
2094
# Perhaps in theory, this should lock the TreeTransform?
2095
return lock.LogicalLockResult(self.unlock)
2101
def root_inventory(self):
2102
"""This Tree does not use inventory as its backing data."""
2103
raise NotImplementedError(_PreviewTree.root_inventory)
2105
def get_root_id(self):
2106
return self._transform.final_file_id(self._transform.root)
2108
def all_file_ids(self):
2109
tree_ids = set(self._transform._tree.all_file_ids())
2110
tree_ids.difference_update(self._transform.tree_file_id(t)
2111
for t in self._transform._removed_id)
2112
tree_ids.update(viewvalues(self._transform._new_id))
2115
def all_versioned_paths(self):
2116
tree_paths = set(self._transform._tree.all_versioned_paths())
2118
tree_paths.difference_update(
2119
self._transform.trans_id_tree_path(t)
2120
for t in self._transform._removed_id)
2123
self._final_paths._determine_path(t)
2124
for t in self._transform._new_id)
2128
def _has_id(self, file_id, fallback_check):
2129
if file_id in self._transform._r_new_id:
2131
elif file_id in {self._transform.tree_file_id(trans_id) for
2132
trans_id in self._transform._removed_id}:
2135
return fallback_check(file_id)
2137
def has_id(self, file_id):
2138
return self._has_id(file_id, self._transform._tree.has_id)
2140
def has_or_had_id(self, file_id):
2141
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2143
def _path2trans_id(self, path):
2144
# We must not use None here, because that is a valid value to store.
2145
trans_id = self._path2trans_id_cache.get(path, object)
2146
if trans_id is not object:
2148
segments = splitpath(path)
2149
cur_parent = self._transform.root
2150
for cur_segment in segments:
2151
for child in self._all_children(cur_parent):
2152
final_name = self._final_name_cache.get(child)
2153
if final_name is None:
2154
final_name = self._transform.final_name(child)
2155
self._final_name_cache[child] = final_name
2156
if final_name == cur_segment:
2160
self._path2trans_id_cache[path] = None
2162
self._path2trans_id_cache[path] = cur_parent
2165
def path2id(self, path):
2166
if isinstance(path, list):
2169
path = osutils.pathjoin(*path)
2170
return self._transform.final_file_id(self._path2trans_id(path))
2172
def id2path(self, file_id):
2173
trans_id = self._transform.trans_id_file_id(file_id)
2175
return self._final_paths._determine_path(trans_id)
2177
raise errors.NoSuchId(self, file_id)
2179
def _all_children(self, trans_id):
2180
children = self._all_children_cache.get(trans_id)
2181
if children is not None:
2183
children = set(self._transform.iter_tree_children(trans_id))
2184
# children in the _new_parent set are provided by _by_parent.
2185
children.difference_update(self._transform._new_parent)
2186
children.update(self._by_parent.get(trans_id, []))
2187
self._all_children_cache[trans_id] = children
2190
def _iter_children(self, file_id):
2191
trans_id = self._transform.trans_id_file_id(file_id)
2192
for child_trans_id in self._all_children(trans_id):
2193
yield self._transform.final_file_id(child_trans_id)
2196
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2197
in self._transform._tree.extras())
2198
possible_extras.update(self._transform._new_contents)
2199
possible_extras.update(self._transform._removed_id)
2200
for trans_id in possible_extras:
2201
if self._transform.final_file_id(trans_id) is None:
2202
yield self._final_paths._determine_path(trans_id)
2204
def _make_inv_entries(self, ordered_entries, specific_files=None):
2205
for trans_id, parent_file_id in ordered_entries:
2206
file_id = self._transform.final_file_id(trans_id)
2209
if (specific_files is not None
2210
and self._final_paths.get_path(trans_id) not in specific_files):
2212
kind = self._transform.final_kind(trans_id)
2214
kind = self._transform._tree.stored_kind(
2215
self._transform._tree.id2path(file_id))
2216
new_entry = inventory.make_entry(
2218
self._transform.final_name(trans_id),
2219
parent_file_id, file_id)
2220
yield new_entry, trans_id
2222
def _list_files_by_dir(self):
2223
todo = [ROOT_PARENT]
2225
while len(todo) > 0:
2227
parent_file_id = self._transform.final_file_id(parent)
2228
children = list(self._all_children(parent))
2229
paths = dict(zip(children, self._final_paths.get_paths(children)))
2230
children.sort(key=paths.get)
2231
todo.extend(reversed(children))
2232
for trans_id in children:
2233
ordered_ids.append((trans_id, parent_file_id))
2236
def iter_child_entries(self, path):
2237
trans_id = self._path2trans_id(path)
2238
if trans_id is None:
2239
raise errors.NoSuchFile(path)
2240
todo = [(child_trans_id, trans_id) for child_trans_id in
2241
self._all_children(trans_id)]
2242
for entry, trans_id in self._make_inv_entries(todo):
2245
def iter_entries_by_dir(self, specific_files=None):
2246
# This may not be a maximally efficient implementation, but it is
2247
# reasonably straightforward. An implementation that grafts the
2248
# TreeTransform changes onto the tree's iter_entries_by_dir results
2249
# might be more efficient, but requires tricky inferences about stack
2251
ordered_ids = self._list_files_by_dir()
2252
for entry, trans_id in self._make_inv_entries(ordered_ids,
2254
yield self._final_paths.get_path(trans_id), entry
2256
def _iter_entries_for_dir(self, dir_path):
2257
"""Return path, entry for items in a directory without recursing down."""
2259
dir_trans_id = self._path2trans_id(dir_path)
2260
dir_id = self._transform.final_file_id(dir_trans_id)
2261
for child_trans_id in self._all_children(dir_trans_id):
2262
ordered_ids.append((child_trans_id, dir_id))
2264
for entry, trans_id in self._make_inv_entries(ordered_ids):
2265
path_entries.append((self._final_paths.get_path(trans_id), entry))
2269
def list_files(self, include_root=False, from_dir=None, recursive=True):
2270
"""See WorkingTree.list_files."""
2271
# XXX This should behave like WorkingTree.list_files, but is really
2272
# more like RevisionTree.list_files.
2278
prefix = from_dir + '/'
2279
entries = self.iter_entries_by_dir()
2280
for path, entry in entries:
2281
if entry.name == '' and not include_root:
2284
if not path.startswith(prefix):
2286
path = path[len(prefix):]
2287
yield path, 'V', entry.kind, entry
2289
if from_dir is None and include_root is True:
2290
root_entry = inventory.make_entry(
2291
'directory', '', ROOT_PARENT, self.get_root_id())
2292
yield '', 'V', 'directory', root_entry
2293
entries = self._iter_entries_for_dir(from_dir or '')
2294
for path, entry in entries:
2295
yield path, 'V', entry.kind, entry
2297
def kind(self, path):
2298
trans_id = self._path2trans_id(path)
2299
if trans_id is None:
2300
raise errors.NoSuchFile(path)
2301
return self._transform.final_kind(trans_id)
2303
def stored_kind(self, path):
2304
trans_id = self._path2trans_id(path)
2305
if trans_id is None:
2306
raise errors.NoSuchFile(path)
2308
return self._transform._new_contents[trans_id]
2310
return self._transform._tree.stored_kind(path)
2312
def get_file_mtime(self, path):
2313
"""See Tree.get_file_mtime"""
2314
file_id = self.path2id(path)
2316
raise errors.NoSuchFile(path)
2317
if not self._content_change(file_id):
2318
return self._transform._tree.get_file_mtime(
2319
self._transform._tree.id2path(file_id))
2320
trans_id = self._path2trans_id(path)
2321
return self._stat_limbo_file(trans_id).st_mtime
2323
def get_file_size(self, path):
2324
"""See Tree.get_file_size"""
2325
trans_id = self._path2trans_id(path)
2326
if trans_id is None:
2327
raise errors.NoSuchFile(path)
2328
kind = self._transform.final_kind(trans_id)
2331
if trans_id in self._transform._new_contents:
2332
return self._stat_limbo_file(trans_id).st_size
2333
if self.kind(path) == 'file':
2334
return self._transform._tree.get_file_size(path)
2338
def get_file_verifier(self, path, stat_value=None):
2339
trans_id = self._path2trans_id(path)
2340
if trans_id is None:
2341
raise errors.NoSuchFile(path)
2342
kind = self._transform._new_contents.get(trans_id)
2344
return self._transform._tree.get_file_verifier(path)
2346
with self.get_file(path) as fileobj:
2347
return ("SHA1", sha_file(fileobj))
2349
def get_file_sha1(self, path, stat_value=None):
2350
trans_id = self._path2trans_id(path)
2351
if trans_id is None:
2352
raise errors.NoSuchFile(path)
2353
kind = self._transform._new_contents.get(trans_id)
2355
return self._transform._tree.get_file_sha1(path)
2357
with self.get_file(path) as fileobj:
2358
return sha_file(fileobj)
2360
def is_executable(self, path):
2361
trans_id = self._path2trans_id(path)
2362
if trans_id is None:
2365
return self._transform._new_executability[trans_id]
2368
return self._transform._tree.is_executable(path)
2369
except OSError as e:
2370
if e.errno == errno.ENOENT:
2373
except errors.NoSuchFile:
2376
def has_filename(self, path):
2377
trans_id = self._path2trans_id(path)
2378
if trans_id in self._transform._new_contents:
2380
elif trans_id in self._transform._removed_contents:
2383
return self._transform._tree.has_filename(path)
2385
def path_content_summary(self, path):
2386
trans_id = self._path2trans_id(path)
2387
tt = self._transform
2388
tree_path = tt._tree_id_paths.get(trans_id)
2389
kind = tt._new_contents.get(trans_id)
2391
if tree_path is None or trans_id in tt._removed_contents:
2392
return 'missing', None, None, None
2393
summary = tt._tree.path_content_summary(tree_path)
2394
kind, size, executable, link_or_sha1 = summary
2397
limbo_name = tt._limbo_name(trans_id)
2398
if trans_id in tt._new_reference_revision:
2399
kind = 'tree-reference'
2401
statval = os.lstat(limbo_name)
2402
size = statval.st_size
2403
if not tt._limbo_supports_executable():
2406
executable = statval.st_mode & S_IEXEC
2410
if kind == 'symlink':
2411
link_or_sha1 = os.readlink(limbo_name)
2412
if not isinstance(link_or_sha1, text_type):
2413
link_or_sha1 = link_or_sha1.decode(osutils._fs_enc)
2414
executable = tt._new_executability.get(trans_id, executable)
2415
return kind, size, executable, link_or_sha1
2417
def iter_changes(self, from_tree, include_unchanged=False,
2418
specific_files=None, pb=None, extra_trees=None,
2419
require_versioned=True, want_unversioned=False):
2420
"""See InterTree.iter_changes.
2422
This has a fast path that is only used when the from_tree matches
2423
the transform tree, and no fancy options are supplied.
2425
if (from_tree is not self._transform._tree or include_unchanged
2426
or specific_files or want_unversioned):
2427
return tree.InterTree(from_tree, self).iter_changes(
2428
include_unchanged=include_unchanged,
2429
specific_files=specific_files,
2431
extra_trees=extra_trees,
2432
require_versioned=require_versioned,
2433
want_unversioned=want_unversioned)
2434
if want_unversioned:
2435
raise ValueError('want_unversioned is not supported')
2436
return self._transform.iter_changes()
2438
def get_file(self, path):
2439
"""See Tree.get_file"""
2440
file_id = self.path2id(path)
2441
if not self._content_change(file_id):
2442
return self._transform._tree.get_file(path)
2443
trans_id = self._path2trans_id(path)
2444
name = self._transform._limbo_name(trans_id)
2445
return open(name, 'rb')
2447
def get_file_with_stat(self, path):
2448
return self.get_file(path), None
2450
def annotate_iter(self, path,
2451
default_revision=_mod_revision.CURRENT_REVISION):
2452
file_id = self.path2id(path)
2453
changes = self._iter_changes_cache.get(file_id)
2457
changed_content, versioned, kind = (
2458
changes.changed_content, changes.versioned, changes.kind)
2461
get_old = (kind[0] == 'file' and versioned[0])
2463
old_annotation = self._transform._tree.annotate_iter(
2464
path, default_revision=default_revision)
2468
return old_annotation
2469
if not changed_content:
2470
return old_annotation
2471
# TODO: This is doing something similar to what WT.annotate_iter is
2472
# doing, however it fails slightly because it doesn't know what
2473
# the *other* revision_id is, so it doesn't know how to give the
2474
# other as the origin for some lines, they all get
2475
# 'default_revision'
2476
# It would be nice to be able to use the new Annotator based
2477
# approach, as well.
2478
return annotate.reannotate([old_annotation],
2479
self.get_file(path).readlines(),
2482
def get_symlink_target(self, path):
2483
"""See Tree.get_symlink_target"""
2484
file_id = self.path2id(path)
2485
if not self._content_change(file_id):
2486
return self._transform._tree.get_symlink_target(path)
2487
trans_id = self._path2trans_id(path)
2488
name = self._transform._limbo_name(trans_id)
2489
return osutils.readlink(name)
2491
def walkdirs(self, prefix=''):
2492
pending = [self._transform.root]
2493
while len(pending) > 0:
2494
parent_id = pending.pop()
2497
prefix = prefix.rstrip('/')
2498
parent_path = self._final_paths.get_path(parent_id)
2499
parent_file_id = self._transform.final_file_id(parent_id)
2500
for child_id in self._all_children(parent_id):
2501
path_from_root = self._final_paths.get_path(child_id)
2502
basename = self._transform.final_name(child_id)
2503
file_id = self._transform.final_file_id(child_id)
2504
kind = self._transform.final_kind(child_id)
2505
if kind is not None:
2506
versioned_kind = kind
2509
versioned_kind = self._transform._tree.stored_kind(
2510
self._transform._tree.id2path(file_id))
2511
if versioned_kind == 'directory':
2512
subdirs.append(child_id)
2513
children.append((path_from_root, basename, kind, None,
2514
file_id, versioned_kind))
2516
if parent_path.startswith(prefix):
2517
yield (parent_path, parent_file_id), children
2518
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2521
def get_parent_ids(self):
2522
return self._parent_ids
2524
def set_parent_ids(self, parent_ids):
2525
self._parent_ids = parent_ids
2527
def get_revision_tree(self, revision_id):
2528
return self._transform._tree.get_revision_tree(revision_id)
864
2531
def joinpath(parent, child):
865
2532
"""Join tree-relative paths, handling the tree root specially"""
866
2533
if parent is None or parent == "":
896
2564
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2565
return self._known_paths[trans_id]
2567
def get_paths(self, trans_ids):
2568
return [(self.get_path(t), t) for t in trans_ids]
899
2571
def topology_sorted_ids(tree):
900
2572
"""Determine the topological order of the ids in a tree"""
901
2573
file_ids = list(tree)
902
2574
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2578
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2579
delta_from_tree=False):
2580
"""Create working tree for a branch, using a TreeTransform.
2582
This function should be used on empty trees, having a tree root at most.
2583
(see merge and revert functionality for working with existing trees)
2585
Existing files are handled like so:
2587
- Existing bzrdirs take precedence over creating new items. They are
2588
created as '%s.diverted' % name.
2589
- Otherwise, if the content on disk matches the content we are building,
2590
it is silently replaced.
2591
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2593
:param tree: The tree to convert wt into a copy of
2594
:param wt: The working tree that files will be placed into
2595
:param accelerator_tree: A tree which can be used for retrieving file
2596
contents more quickly than tree itself, i.e. a workingtree. tree
2597
will be used for cases where accelerator_tree's content is different.
2598
:param hardlink: If true, hard-link files to accelerator_tree, where
2599
possible. accelerator_tree must implement abspath, i.e. be a
2601
:param delta_from_tree: If true, build_tree may use the input Tree to
2602
generate the inventory delta.
2604
with wt.lock_tree_write(), tree.lock_read():
2605
if accelerator_tree is not None:
2606
accelerator_tree.lock_read()
2608
return _build_tree(tree, wt, accelerator_tree, hardlink,
2611
if accelerator_tree is not None:
2612
accelerator_tree.unlock()
2615
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2616
"""See build_tree."""
2617
for num, _unused in enumerate(wt.all_versioned_paths()):
2618
if num > 0: # more than just a root
2619
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2620
file_trans_id = {}
908
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2621
top_pb = ui.ui_factory.nested_progress_bar()
909
2622
pp = ProgressPhase("Build phase", 2, top_pb)
910
tt = TreeTransform(wt)
2623
if tree.get_root_id() is not None:
2624
# This is kind of a hack: we should be altering the root
2625
# as part of the regular tree shape diff logic.
2626
# The conditional test here is to avoid doing an
2627
# expensive operation (flush) every time the root id
2628
# is set within the tree, nor setting the root and thus
2629
# marking the tree as dirty, because we use two different
2630
# idioms here: tree interfaces and inventory interfaces.
2631
if wt.get_root_id() != tree.get_root_id():
2632
wt.set_root_id(tree.get_root_id())
2634
tt = wt.get_transform()
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
915
pb = bzrlib.ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2638
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_path('')
2639
with ui.ui_factory.nested_progress_bar() as pb:
2640
deferred_contents = []
2642
total = len(tree.all_versioned_paths())
2644
precomputed_delta = []
2646
precomputed_delta = None
2647
# Check if tree inventory has content. If so, we populate
2648
# existing_files with the directory content. If there are no
2649
# entries we skip populating existing_files as its not used.
2650
# This improves performance and unncessary work on large
2651
# directory trees. (#501307)
2653
existing_files = set()
2654
for dir, files in wt.walkdirs():
2655
existing_files.update(f[0] for f in files)
2656
for num, (tree_path, entry) in \
2657
enumerate(tree.iter_entries_by_dir()):
2658
pb.update(gettext("Building tree"), num
2659
- len(deferred_contents), total)
920
2660
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2663
file_id = entry.file_id
2665
precomputed_delta.append((None, tree_path, file_id, entry))
2666
if tree_path in existing_files:
2667
target_path = wt.abspath(tree_path)
2668
kind = file_kind(target_path)
2669
if kind == "directory":
2671
controldir.ControlDir.open(target_path)
2672
except errors.NotBranchError:
2676
if (file_id not in divert
2678
tree, entry, tree_path, kind, target_path)):
2679
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2680
if kind == 'directory':
924
2682
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2683
if entry.kind == 'file':
2684
# We *almost* replicate new_by_entry, so that we can defer
2685
# getting the file text, and get them all at once.
2686
trans_id = tt.create_path(entry.name, parent_id)
2687
file_trans_id[file_id] = trans_id
2688
tt.version_file(file_id, trans_id)
2689
executable = tree.is_executable(tree_path)
2691
tt.set_executability(executable, trans_id)
2692
trans_data = (trans_id, file_id,
2693
tree_path, entry.text_sha1)
2694
deferred_contents.append((tree_path, trans_data))
2696
file_trans_id[file_id] = new_by_entry(
2697
tree_path, tt, entry, parent_id, tree)
2699
new_trans_id = file_trans_id[file_id]
2700
old_parent = tt.trans_id_tree_path(tree_path)
2701
_reparent_children(tt, old_parent, new_trans_id)
2702
offset = num + 1 - len(deferred_contents)
2703
_create_files(tt, tree, deferred_contents, pb, offset,
2704
accelerator_tree, hardlink)
2706
divert_trans = set(file_trans_id[f] for f in divert)
2709
return resolve_checkout(t, c, divert_trans)
2710
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2711
if len(raw_conflicts) > 0:
2712
precomputed_delta = None
2713
conflicts = cook_conflicts(raw_conflicts, tt)
2714
for conflict in conflicts:
2715
trace.warning(text_type(conflict))
2717
wt.add_conflicts(conflicts)
2718
except errors.UnsupportedOperation:
2720
result = tt.apply(no_conflicts=True,
2721
precomputed_delta=precomputed_delta)
933
2724
top_pb.finished()
935
def new_by_entry(tt, entry, parent_id, tree):
2728
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2730
total = len(desired_files) + offset
2732
if accelerator_tree is None:
2733
new_desired_files = desired_files
2735
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2737
change.path for change in iter
2738
if not (change.changed_content or change.executable[0] != change.executable[1])]
2739
if accelerator_tree.supports_content_filtering():
2740
unchanged = [(tp, ap) for (tp, ap) in unchanged
2741
if not next(accelerator_tree.iter_search_rules([ap]))]
2742
unchanged = dict(unchanged)
2743
new_desired_files = []
2745
for unused_tree_path, (trans_id, file_id, tree_path, text_sha1) in desired_files:
2746
accelerator_path = unchanged.get(tree_path)
2747
if accelerator_path is None:
2748
new_desired_files.append((tree_path,
2749
(trans_id, file_id, tree_path, text_sha1)))
2751
pb.update(gettext('Adding file contents'), count + offset, total)
2753
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2756
with accelerator_tree.get_file(accelerator_path) as f:
2757
chunks = osutils.file_iterator(f)
2758
if wt.supports_content_filtering():
2759
filters = wt._content_filter_stack(tree_path)
2760
chunks = filtered_output_bytes(chunks, filters,
2761
ContentFilterContext(tree_path, tree))
2762
tt.create_file(chunks, trans_id, sha1=text_sha1)
2765
for count, ((trans_id, file_id, tree_path, text_sha1), contents) in enumerate(
2766
tree.iter_files_bytes(new_desired_files)):
2767
if wt.supports_content_filtering():
2768
filters = wt._content_filter_stack(tree_path)
2769
contents = filtered_output_bytes(contents, filters,
2770
ContentFilterContext(tree_path, tree))
2771
tt.create_file(contents, trans_id, sha1=text_sha1)
2772
pb.update(gettext('Adding file contents'), count + offset, total)
2775
def _reparent_children(tt, old_parent, new_parent):
2776
for child in tt.iter_tree_children(old_parent):
2777
tt.adjust_path(tt.final_name(child), new_parent, child)
2780
def _reparent_transform_children(tt, old_parent, new_parent):
2781
by_parent = tt.by_parent()
2782
for child in by_parent[old_parent]:
2783
tt.adjust_path(tt.final_name(child), new_parent, child)
2784
return by_parent[old_parent]
2787
def _content_match(tree, entry, tree_path, kind, target_path):
2788
if entry.kind != kind:
2790
if entry.kind == "directory":
2792
if entry.kind == "file":
2793
with open(target_path, 'rb') as f1, \
2794
tree.get_file(tree_path) as f2:
2795
if osutils.compare_files(f1, f2):
2797
elif entry.kind == "symlink":
2798
if tree.get_symlink_target(tree_path) == os.readlink(target_path):
2803
def resolve_checkout(tt, conflicts, divert):
2804
new_conflicts = set()
2805
for c_type, conflict in ((c[0], c) for c in conflicts):
2806
# Anything but a 'duplicate' would indicate programmer error
2807
if c_type != 'duplicate':
2808
raise AssertionError(c_type)
2809
# Now figure out which is new and which is old
2810
if tt.new_contents(conflict[1]):
2811
new_file = conflict[1]
2812
old_file = conflict[2]
2814
new_file = conflict[2]
2815
old_file = conflict[1]
2817
# We should only get here if the conflict wasn't completely
2819
final_parent = tt.final_parent(old_file)
2820
if new_file in divert:
2821
new_name = tt.final_name(old_file) + '.diverted'
2822
tt.adjust_path(new_name, final_parent, new_file)
2823
new_conflicts.add((c_type, 'Diverted to',
2824
new_file, old_file))
2826
new_name = tt.final_name(old_file) + '.moved'
2827
tt.adjust_path(new_name, final_parent, old_file)
2828
new_conflicts.add((c_type, 'Moved existing file to',
2829
old_file, new_file))
2830
return new_conflicts
2833
def new_by_entry(path, tt, entry, parent_id, tree):
936
2834
"""Create a new file according to its inventory entry"""
937
2835
name = entry.name
938
2836
kind = entry.kind
939
2837
if kind == 'file':
940
contents = tree.get_file(entry.file_id).readlines()
941
executable = tree.is_executable(entry.file_id)
942
return tt.new_file(name, parent_id, contents, entry.file_id,
944
elif kind == 'directory':
945
return tt.new_directory(name, parent_id, entry.file_id)
2838
with tree.get_file(path) as f:
2839
executable = tree.is_executable(path)
2841
name, parent_id, osutils.file_iterator(f), entry.file_id,
2843
elif kind in ('directory', 'tree-reference'):
2844
trans_id = tt.new_directory(name, parent_id, entry.file_id)
2845
if kind == 'tree-reference':
2846
tt.set_tree_reference(entry.reference_revision, trans_id)
946
2848
elif kind == 'symlink':
947
target = tree.get_symlink_target(entry.file_id)
2849
target = tree.get_symlink_target(path)
948
2850
return tt.new_symlink(name, parent_id, target, entry.file_id)
950
def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
951
"""Create new file contents according to an inventory entry."""
952
if entry.kind == "file":
954
lines = tree.get_file(entry.file_id).readlines()
955
tt.create_file(lines, trans_id, mode_id=mode_id)
956
elif entry.kind == "symlink":
957
tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
958
elif entry.kind == "directory":
2852
raise errors.BadFileKindError(name, kind)
2855
def create_from_tree(tt, trans_id, tree, path, chunks=None,
2856
filter_tree_path=None):
2857
"""Create new file contents according to tree contents.
2859
:param filter_tree_path: the tree path to use to lookup
2860
content filters to apply to the bytes output in the working tree.
2861
This only applies if the working tree supports content filtering.
2863
kind = tree.kind(path)
2864
if kind == 'directory':
959
2865
tt.create_directory(trans_id)
2866
elif kind == "file":
2868
f = tree.get_file(path)
2869
chunks = osutils.file_iterator(f)
2874
if wt.supports_content_filtering() and filter_tree_path is not None:
2875
filters = wt._content_filter_stack(filter_tree_path)
2876
chunks = filtered_output_bytes(
2878
ContentFilterContext(filter_tree_path, tree))
2879
tt.create_file(chunks, trans_id)
2883
elif kind == "symlink":
2884
tt.create_symlink(tree.get_symlink_target(path), trans_id)
2886
raise AssertionError('Unknown kind %r' % kind)
961
2889
def create_entry_executability(tt, entry, trans_id):
962
2890
"""Set the executability of a trans_id according to an inventory entry"""
964
2892
tt.set_executability(entry.executable, trans_id)
967
def find_interesting(working_tree, target_tree, filenames):
968
"""Find the ids corresponding to specified filenames."""
970
interesting_ids = None
972
interesting_ids = set()
973
for tree_path in filenames:
975
for tree in (working_tree, target_tree):
976
file_id = tree.inventory.path2id(tree_path)
977
if file_id is not None:
978
interesting_ids.add(file_id)
981
raise NotVersionedError(path=tree_path)
982
return interesting_ids
985
def change_entry(tt, file_id, working_tree, target_tree,
986
trans_id_file_id, backups, trans_id, by_parent):
987
"""Replace a file_id's contents with those from a target tree."""
988
e_trans_id = trans_id_file_id(file_id)
989
entry = target_tree.inventory[file_id]
990
has_contents, contents_mod, meta_mod, = _entry_changes(file_id, entry,
996
tt.delete_contents(e_trans_id)
998
parent_trans_id = trans_id_file_id(entry.parent_id)
999
backup_name = get_backup_name(entry, by_parent,
1000
parent_trans_id, tt)
1001
tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
1002
tt.unversion_file(e_trans_id)
1003
e_trans_id = tt.create_path(entry.name, parent_trans_id)
1004
tt.version_file(file_id, e_trans_id)
1005
trans_id[file_id] = e_trans_id
1006
create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
1007
create_entry_executability(tt, entry, e_trans_id)
1010
tt.set_executability(entry.executable, e_trans_id)
1011
if tt.final_name(e_trans_id) != entry.name:
1014
parent_id = tt.final_parent(e_trans_id)
1015
parent_file_id = tt.final_file_id(parent_id)
1016
if parent_file_id != entry.parent_id:
1021
parent_trans_id = trans_id_file_id(entry.parent_id)
1022
tt.adjust_path(entry.name, parent_trans_id, e_trans_id)
1025
def get_backup_name(entry, by_parent, parent_trans_id, tt):
1026
"""Produce a backup-style name that appears to be available"""
1030
yield "%s.~%d~" % (entry.name, counter)
1032
for name in name_gen():
1033
if not tt.has_named_child(by_parent, parent_trans_id, name):
1036
def _entry_changes(file_id, entry, working_tree):
1037
"""Determine in which ways the inventory entry has changed.
1039
Returns booleans: has_contents, content_mod, meta_mod
1040
has_contents means there are currently contents, but they differ
1041
contents_mod means contents need to be modified
1042
meta_mod means the metadata needs to be modified
1044
cur_entry = working_tree.inventory[file_id]
1046
working_kind = working_tree.kind(file_id)
1049
if e.errno != errno.ENOENT:
1051
has_contents = False
1054
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
1059
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
cur_entry._forget_tree_state()
1065
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2895
def revert(working_tree, target_tree, filenames, backups=False,
2896
pb=None, change_reporter=None):
1070
2897
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
1075
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2898
pb = ui.ui_factory.nested_progress_bar()
2900
with target_tree.lock_read(), working_tree.get_transform(pb) as tt:
2901
pp = ProgressPhase("Revert phase", 3, pb)
2902
conflicts, merge_modified = _prepare_revert_transform(
2903
working_tree, target_tree, tt, filenames, backups, pp)
2905
change_reporter = delta._ChangeReporter(
2906
unversioned_filter=working_tree.is_ignored)
2907
delta.report_changes(tt.iter_changes(), change_reporter)
2908
for conflict in conflicts:
2909
trace.warning(text_type(conflict))
2912
if working_tree.supports_merge_modified():
2913
working_tree.set_merge_modified(merge_modified)
2919
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2920
backups, pp, basis_tree=None,
2921
merge_modified=None):
2922
with ui.ui_factory.nested_progress_bar() as child_pb:
2923
if merge_modified is None:
2924
merge_modified = working_tree.merge_modified()
2925
merge_modified = _alter_files(working_tree, target_tree, tt,
2926
child_pb, filenames, backups,
2927
merge_modified, basis_tree)
2928
with ui.ui_factory.nested_progress_bar() as child_pb:
2929
raw_conflicts = resolve_conflicts(
2930
tt, child_pb, lambda t, c: conflict_pass(t, c, target_tree))
2931
conflicts = cook_conflicts(raw_conflicts, tt)
2932
return conflicts, merge_modified
2935
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2936
backups, merge_modified, basis_tree=None):
2937
if basis_tree is not None:
2938
basis_tree.lock_read()
2939
# We ask the working_tree for its changes relative to the target, rather
2940
# than the target changes relative to the working tree. Because WT4 has an
2941
# optimizer to compare itself to a target, but no optimizer for the
2943
change_list = working_tree.iter_changes(
2944
target_tree, specific_files=specific_files, pb=pb)
2945
if not target_tree.is_versioned(u''):
2951
for id_num, change in enumerate(change_list):
2952
file_id = change.file_id
2953
target_path, wt_path = change.path
2954
target_versioned, wt_versioned = change.versioned
2955
target_parent, wt_parent = change.parent_id
2956
target_name, wt_name = change.name
2957
target_kind, wt_kind = change.kind
2958
target_executable, wt_executable = change.executable
2959
if skip_root and wt_parent is None:
2961
trans_id = tt.trans_id_file_id(file_id)
2963
if change.changed_content:
2964
keep_content = False
2965
if wt_kind == 'file' and (backups or target_kind is None):
2966
wt_sha1 = working_tree.get_file_sha1(wt_path)
2967
if merge_modified.get(wt_path) != wt_sha1:
2968
# acquire the basis tree lazily to prevent the
2969
# expense of accessing it when it's not needed ?
2970
# (Guessing, RBC, 200702)
2971
if basis_tree is None:
2972
basis_tree = working_tree.basis_tree()
2973
basis_tree.lock_read()
2974
basis_path = find_previous_path(
2975
working_tree, basis_tree, wt_path)
2976
if basis_path is None:
2977
if target_kind is None and not target_versioned:
2980
if wt_sha1 != basis_tree.get_file_sha1(basis_path):
2982
if wt_kind is not None:
2983
if not keep_content:
1121
2984
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2985
elif target_kind is not None:
2986
parent_trans_id = tt.trans_id_file_id(wt_parent)
2987
backup_name = tt._available_backup_name(
2988
wt_name, parent_trans_id)
2989
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2990
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2991
if wt_versioned and target_versioned:
2992
tt.unversion_file(trans_id)
2993
tt.version_file(file_id, new_trans_id)
2994
# New contents should have the same unix perms as old
2997
trans_id = new_trans_id
2998
if target_kind in ('directory', 'tree-reference'):
2999
tt.create_directory(trans_id)
3000
if target_kind == 'tree-reference':
3001
revision = target_tree.get_reference_revision(
3003
tt.set_tree_reference(revision, trans_id)
3004
elif target_kind == 'symlink':
3005
tt.create_symlink(target_tree.get_symlink_target(
3006
target_path), trans_id)
3007
elif target_kind == 'file':
3008
deferred_files.append(
3009
(target_path, (trans_id, mode_id, file_id)))
3010
if basis_tree is None:
3011
basis_tree = working_tree.basis_tree()
3012
basis_tree.lock_read()
3013
new_sha1 = target_tree.get_file_sha1(target_path)
3014
basis_path = find_previous_path(target_tree, basis_tree, target_path)
3015
if (basis_path is not None and
3016
new_sha1 == basis_tree.get_file_sha1(basis_path)):
3017
# If the new contents of the file match what is in basis,
3018
# then there is no need to store in merge_modified.
3019
if basis_path in merge_modified:
3020
del merge_modified[basis_path]
3022
merge_modified[target_path] = new_sha1
3024
# preserve the execute bit when backing up
3025
if keep_content and wt_executable == target_executable:
3026
tt.set_executability(target_executable, trans_id)
3027
elif target_kind is not None:
3028
raise AssertionError(target_kind)
3029
if not wt_versioned and target_versioned:
3030
tt.version_file(file_id, trans_id)
3031
if wt_versioned and not target_versioned:
3032
tt.unversion_file(trans_id)
3033
if (target_name is not None
3034
and (wt_name != target_name or wt_parent != target_parent)):
3035
if target_name == '' and target_parent is None:
3036
parent_trans = ROOT_PARENT
3038
parent_trans = tt.trans_id_file_id(target_parent)
3039
if wt_parent is None and wt_versioned:
3040
tt.adjust_root_path(target_name, parent_trans)
3042
tt.adjust_path(target_name, parent_trans, trans_id)
3043
if wt_executable != target_executable and target_kind == "file":
3044
tt.set_executability(target_executable, trans_id)
3045
if working_tree.supports_content_filtering():
3046
for (trans_id, mode_id, file_id), bytes in (
3047
target_tree.iter_files_bytes(deferred_files)):
3048
# We're reverting a tree to the target tree so using the
3049
# target tree to find the file path seems the best choice
3050
# here IMO - Ian C 27/Oct/2009
3051
filter_tree_path = target_tree.id2path(file_id)
3052
filters = working_tree._content_filter_stack(filter_tree_path)
3053
bytes = filtered_output_bytes(
3055
ContentFilterContext(filter_tree_path, working_tree))
3056
tt.create_file(bytes, trans_id, mode_id)
3058
for (trans_id, mode_id, file_id), bytes in target_tree.iter_files_bytes(
3060
tt.create_file(bytes, trans_id, mode_id)
3061
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
3063
if basis_tree is not None:
3065
return merge_modified
3068
def resolve_conflicts(tt, pb=None, pass_func=None):
1144
3069
"""Make many conflict-resolution attempts, but die if they fail"""
3070
if pass_func is None:
3071
pass_func = conflict_pass
1145
3072
new_conflicts = set()
3073
with ui.ui_factory.nested_progress_bar() as pb:
1147
3074
for n in range(10):
1148
pb.update('Resolution pass', n+1, 10)
3075
pb.update(gettext('Resolution pass'), n + 1, 10)
1149
3076
conflicts = tt.find_conflicts()
1150
3077
if len(conflicts) == 0:
1151
3078
return new_conflicts
1152
new_conflicts.update(conflict_pass(tt, conflicts))
3079
new_conflicts.update(pass_func(tt, conflicts))
1153
3080
raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
3083
def conflict_pass(tt, conflicts, path_tree=None):
3084
"""Resolve some classes of conflicts.
3086
:param tt: The transform to resolve conflicts in
3087
:param conflicts: The conflicts to resolve
3088
:param path_tree: A Tree to get supplemental paths from
1160
3090
new_conflicts = set()
1161
3091
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
3092
if c_type == 'duplicate id':