861
830
self.create_symlink(target, trans_id)
833
def new_orphan(self, trans_id, parent_id):
834
"""Schedule an item to be orphaned.
836
When a directory is about to be removed, its children, if they are not
837
versioned are moved out of the way: they don't have a parent anymore.
839
:param trans_id: The trans_id of the existing item.
840
:param parent_id: The parent trans_id of the item.
842
raise NotImplementedError(self.new_orphan)
844
def _get_potential_orphans(self, dir_id):
845
"""Find the potential orphans in a directory.
847
A directory can't be safely deleted if there are versioned files in it.
848
If all the contained files are unversioned then they can be orphaned.
850
The 'None' return value means that the directory contains at least one
851
versioned file and should not be deleted.
853
:param dir_id: The directory trans id.
855
:return: A list of the orphan trans ids or None if at least one
856
versioned file is present.
859
# Find the potential orphans, stop if one item should be kept
860
for child_tid in self.by_parent()[dir_id]:
861
if child_tid in self._removed_contents:
862
# The child is removed as part of the transform. Since it was
863
# versioned before, it's not an orphan
865
elif self.final_file_id(child_tid) is None:
866
# The child is not versioned
867
orphans.append(child_tid)
869
# We have a versioned file here, searching for orphans is
875
def _affected_ids(self):
876
"""Return the set of transform ids affected by the transform"""
877
trans_ids = set(self._removed_id)
878
trans_ids.update(self._new_id)
879
trans_ids.update(self._removed_contents)
880
trans_ids.update(self._new_contents)
881
trans_ids.update(self._new_executability)
882
trans_ids.update(self._new_name)
883
trans_ids.update(self._new_parent)
886
def _get_file_id_maps(self):
887
"""Return mapping of file_ids to trans_ids in the to and from states"""
888
trans_ids = self._affected_ids()
891
# Build up two dicts: trans_ids associated with file ids in the
892
# FROM state, vs the TO state.
893
for trans_id in trans_ids:
894
from_file_id = self.tree_file_id(trans_id)
895
if from_file_id is not None:
896
from_trans_ids[from_file_id] = trans_id
897
to_file_id = self.final_file_id(trans_id)
898
if to_file_id is not None:
899
to_trans_ids[to_file_id] = trans_id
900
return from_trans_ids, to_trans_ids
902
def _from_file_data(self, from_trans_id, from_versioned, from_path):
903
"""Get data about a file in the from (tree) state
905
Return a (name, parent, kind, executable) tuple
907
from_path = self._tree_id_paths.get(from_trans_id)
909
# get data from working tree if versioned
910
from_entry = next(self._tree.iter_entries_by_dir(
911
specific_files=[from_path]))[1]
912
from_name = from_entry.name
913
from_parent = from_entry.parent_id
916
if from_path is None:
917
# File does not exist in FROM state
921
# File exists, but is not versioned. Have to use path-
923
from_name = os.path.basename(from_path)
924
tree_parent = self.get_tree_parent(from_trans_id)
925
from_parent = self.tree_file_id(tree_parent)
926
if from_path is not None:
927
from_kind, from_executable, from_stats = \
928
self._tree._comparison_data(from_entry, from_path)
931
from_executable = False
932
return from_name, from_parent, from_kind, from_executable
934
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
935
"""Get data about a file in the to (target) state
937
Return a (name, parent, kind, executable) tuple
939
to_name = self.final_name(to_trans_id)
940
to_kind = self.final_kind(to_trans_id)
941
to_parent = self.final_file_id(self.final_parent(to_trans_id))
942
if to_trans_id in self._new_executability:
943
to_executable = self._new_executability[to_trans_id]
944
elif to_trans_id == from_trans_id:
945
to_executable = from_executable
947
to_executable = False
948
return to_name, to_parent, to_kind, to_executable
950
def iter_changes(self):
951
"""Produce output in the same format as Tree.iter_changes.
953
Will produce nonsensical results if invoked while inventory/filesystem
954
conflicts (as reported by TreeTransform.find_conflicts()) are present.
956
This reads the Transform, but only reproduces changes involving a
957
file_id. Files that are not versioned in either of the FROM or TO
958
states are not reflected.
960
final_paths = FinalPaths(self)
961
from_trans_ids, to_trans_ids = self._get_file_id_maps()
963
# Now iterate through all active file_ids
964
for file_id in set(from_trans_ids).union(to_trans_ids):
966
from_trans_id = from_trans_ids.get(file_id)
967
# find file ids, and determine versioning state
968
if from_trans_id is None:
969
from_versioned = False
970
from_trans_id = to_trans_ids[file_id]
972
from_versioned = True
973
to_trans_id = to_trans_ids.get(file_id)
974
if to_trans_id is None:
976
to_trans_id = from_trans_id
980
if not from_versioned:
983
from_path = self._tree_id_paths.get(from_trans_id)
987
to_path = final_paths.get_path(to_trans_id)
989
from_name, from_parent, from_kind, from_executable = \
990
self._from_file_data(from_trans_id, from_versioned, from_path)
992
to_name, to_parent, to_kind, to_executable = \
993
self._to_file_data(to_trans_id, from_trans_id, from_executable)
995
if from_kind != to_kind:
997
elif to_kind in ('file', 'symlink') and (
998
to_trans_id != from_trans_id
999
or to_trans_id in self._new_contents):
1001
if (not modified and from_versioned == to_versioned
1002
and from_parent == to_parent and from_name == to_name
1003
and from_executable == to_executable):
1005
results.append((file_id, (from_path, to_path), modified,
1006
(from_versioned, to_versioned),
1007
(from_parent, to_parent),
1008
(from_name, to_name),
1009
(from_kind, to_kind),
1010
(from_executable, to_executable)))
1014
return (paths[0] or '', paths[1] or '')
1015
return iter(sorted(results, key=path_key))
1017
def get_preview_tree(self):
1018
"""Return a tree representing the result of the transform.
1020
The tree is a snapshot, and altering the TreeTransform will invalidate
1023
return _PreviewTree(self)
1025
def commit(self, branch, message, merge_parents=None, strict=False,
1026
timestamp=None, timezone=None, committer=None, authors=None,
1027
revprops=None, revision_id=None):
1028
"""Commit the result of this TreeTransform to a branch.
1030
:param branch: The branch to commit to.
1031
:param message: The message to attach to the commit.
1032
:param merge_parents: Additional parent revision-ids specified by
1034
:param strict: If True, abort the commit if there are unversioned
1036
:param timestamp: if not None, seconds-since-epoch for the time and
1037
date. (May be a float.)
1038
:param timezone: Optional timezone for timestamp, as an offset in
1040
:param committer: Optional committer in email-id format.
1041
(e.g. "J Random Hacker <jrandom@example.com>")
1042
:param authors: Optional list of authors in email-id format.
1043
:param revprops: Optional dictionary of revision properties.
1044
:param revision_id: Optional revision id. (Specifying a revision-id
1045
may reduce performance for some non-native formats.)
1046
:return: The revision_id of the revision committed.
1048
self._check_malformed()
1050
unversioned = set(self._new_contents).difference(set(self._new_id))
1051
for trans_id in unversioned:
1052
if self.final_file_id(trans_id) is None:
1053
raise errors.StrictCommitFailed()
1055
revno, last_rev_id = branch.last_revision_info()
1056
if last_rev_id == _mod_revision.NULL_REVISION:
1057
if merge_parents is not None:
1058
raise ValueError('Cannot supply merge parents for first'
1062
parent_ids = [last_rev_id]
1063
if merge_parents is not None:
1064
parent_ids.extend(merge_parents)
1065
if self._tree.get_revision_id() != last_rev_id:
1066
raise ValueError('TreeTransform not based on branch basis: %s' %
1067
self._tree.get_revision_id().decode('utf-8'))
1068
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1069
builder = branch.get_commit_builder(parent_ids,
1070
timestamp=timestamp,
1072
committer=committer,
1074
revision_id=revision_id)
1075
preview = self.get_preview_tree()
1076
list(builder.record_iter_changes(preview, last_rev_id,
1077
self.iter_changes()))
1078
builder.finish_inventory()
1079
revision_id = builder.commit(message)
1080
branch.set_last_revision_info(revno + 1, revision_id)
def _text_parent(self, trans_id):
    """Return the tree path to use as the text-merge parent for trans_id.

    :return: The working-tree path of the file, or None if the path is
        unknown, the entry is not a regular file, or the file is gone.
    """
    path = self.tree_path(trans_id)
    try:
        # Only an existing regular file can serve as a text parent.
        if path is None or self._tree.kind(path) != 'file':
            return None
    except errors.NoSuchFile:
        # The file vanished from the tree; treat it as having no parent.
        return None
    return path
def _get_parents_texts(self, trans_id):
    """Get texts for compression parents of this file.

    :return: A 1-tuple with the parent text, or an empty tuple when the
        file has no usable text parent.
    """
    path = self._text_parent(trans_id)
    if path is None:
        return ()
    return (self._tree.get_file_text(path),)
def _get_parents_lines(self, trans_id):
    """Get lines for compression parents of this file.

    :return: A 1-tuple with the parent's lines, or an empty tuple when the
        file has no usable text parent.
    """
    path = self._text_parent(trans_id)
    if path is None:
        return ()
    return (self._tree.get_file_lines(path),)
1106
def serialize(self, serializer):
1107
"""Serialize this TreeTransform.
1109
:param serializer: A Serialiser like pack.ContainerSerializer.
1111
new_name = {k.encode('utf-8'): v.encode('utf-8')
1112
for k, v in viewitems(self._new_name)}
1113
new_parent = {k.encode('utf-8'): v.encode('utf-8')
1114
for k, v in viewitems(self._new_parent)}
1115
new_id = {k.encode('utf-8'): v
1116
for k, v in viewitems(self._new_id)}
1117
new_executability = {k.encode('utf-8'): int(v)
1118
for k, v in viewitems(self._new_executability)}
1119
tree_path_ids = {k.encode('utf-8'): v.encode('utf-8')
1120
for k, v in viewitems(self._tree_path_ids)}
1121
non_present_ids = {k: v.encode('utf-8')
1122
for k, v in viewitems(self._non_present_ids)}
1123
removed_contents = [trans_id.encode('utf-8')
1124
for trans_id in self._removed_contents]
1125
removed_id = [trans_id.encode('utf-8')
1126
for trans_id in self._removed_id]
1128
b'_id_number': self._id_number,
1129
b'_new_name': new_name,
1130
b'_new_parent': new_parent,
1131
b'_new_executability': new_executability,
1133
b'_tree_path_ids': tree_path_ids,
1134
b'_removed_id': removed_id,
1135
b'_removed_contents': removed_contents,
1136
b'_non_present_ids': non_present_ids,
1138
yield serializer.bytes_record(bencode.bencode(attribs),
1140
for trans_id, kind in sorted(viewitems(self._new_contents)):
1142
with open(self._limbo_name(trans_id), 'rb') as cur_file:
1143
lines = cur_file.readlines()
1144
parents = self._get_parents_lines(trans_id)
1145
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1146
content = b''.join(mpdiff.to_patch())
1147
if kind == 'directory':
1149
if kind == 'symlink':
1150
content = self._read_symlink_target(trans_id)
1151
if not isinstance(content, bytes):
1152
content = content.encode('utf-8')
1153
yield serializer.bytes_record(
1154
content, ((trans_id.encode('utf-8'), kind.encode('ascii')),))
1156
def deserialize(self, records):
1157
"""Deserialize a stored TreeTransform.
1159
:param records: An iterable of (names, content) tuples, as per
1160
pack.ContainerPushParser.
1162
names, content = next(records)
1163
attribs = bencode.bdecode(content)
1164
self._id_number = attribs[b'_id_number']
1165
self._new_name = {k.decode('utf-8'): v.decode('utf-8')
1166
for k, v in viewitems(attribs[b'_new_name'])}
1167
self._new_parent = {k.decode('utf-8'): v.decode('utf-8')
1168
for k, v in viewitems(attribs[b'_new_parent'])}
1169
self._new_executability = {
1170
k.decode('utf-8'): bool(v)
1171
for k, v in viewitems(attribs[b'_new_executability'])}
1172
self._new_id = {k.decode('utf-8'): v
1173
for k, v in viewitems(attribs[b'_new_id'])}
1174
self._r_new_id = {v: k for k, v in viewitems(self._new_id)}
1175
self._tree_path_ids = {}
1176
self._tree_id_paths = {}
1177
for bytepath, trans_id in viewitems(attribs[b'_tree_path_ids']):
1178
path = bytepath.decode('utf-8')
1179
trans_id = trans_id.decode('utf-8')
1180
self._tree_path_ids[path] = trans_id
1181
self._tree_id_paths[trans_id] = path
1182
self._removed_id = {trans_id.decode('utf-8')
1183
for trans_id in attribs[b'_removed_id']}
1184
self._removed_contents = set(
1185
trans_id.decode('utf-8')
1186
for trans_id in attribs[b'_removed_contents'])
1187
self._non_present_ids = {
1188
k: v.decode('utf-8')
1189
for k, v in viewitems(attribs[b'_non_present_ids'])}
1190
for ((trans_id, kind),), content in records:
1191
trans_id = trans_id.decode('utf-8')
1192
kind = kind.decode('ascii')
1194
mpdiff = multiparent.MultiParent.from_patch(content)
1195
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1196
self.create_file(lines, trans_id)
1197
if kind == 'directory':
1198
self.create_directory(trans_id)
1199
if kind == 'symlink':
1200
self.create_symlink(content.decode('utf-8'), trans_id)
1203
class DiskTreeTransform(TreeTransformBase):
1204
"""Tree transform storing its contents on disk."""
def __init__(self, tree, limbodir, pb=None, case_sensitive=True):
    """Constructor.

    :param tree: The tree that will be transformed, but not necessarily
        the output tree.
    :param limbodir: A directory where new files can be stored until
        they are installed in their proper places
    :param pb: ignored
    :param case_sensitive: If True, the target of the transform is
        case sensitive, not just case preserving.
    """
    TreeTransformBase.__init__(self, tree, pb, case_sensitive)
    self._limbodir = limbodir
    self._deletiondir = None
    # A mapping of transform ids to their limbo filename
    self._limbo_files = {}
    self._possibly_stale_limbo_files = set()
    # A mapping of transform ids to a set of the transform ids of children
    # that their limbo directory has
    self._limbo_children = {}
    # Map transform ids to maps of child filename to child transform id
    self._limbo_children_names = {}
    # List of transform ids that need to be renamed from limbo into place
    self._needs_rename = set()
    self._creation_mtime = None
    self._create_symlinks = osutils.supports_symlinks(self._limbodir)
1233
"""Release the working tree lock, if held, clean up limbo dir.
1235
This is required if apply has not been invoked, but can be invoked
1238
if self._tree is None:
1241
limbo_paths = list(viewvalues(self._limbo_files))
1242
limbo_paths.extend(self._possibly_stale_limbo_files)
1243
limbo_paths.sort(reverse=True)
1244
for path in limbo_paths:
1247
except OSError as e:
1248
if e.errno != errno.ENOENT:
1250
# XXX: warn? perhaps we just got interrupted at an
1251
# inconvenient moment, but perhaps files are disappearing
1254
delete_any(self._limbodir)
1256
# We don't especially care *why* the dir is immortal.
1257
raise ImmortalLimbo(self._limbodir)
1259
if self._deletiondir is not None:
1260
delete_any(self._deletiondir)
1262
raise errors.ImmortalPendingDeletion(self._deletiondir)
1264
TreeTransformBase.finalize(self)
def _limbo_supports_executable(self):
    """Check if the limbo path supports the executable bit."""
    return osutils.supports_executable(self._limbodir)
def _limbo_name(self, trans_id):
    """Generate the limbo name of a file.

    The name is cached in _limbo_files so repeated lookups for the same
    trans_id return the same path.
    """
    limbo_name = self._limbo_files.get(trans_id)
    if limbo_name is None:
        limbo_name = self._generate_limbo_path(trans_id)
        self._limbo_files[trans_id] = limbo_name
    return limbo_name
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the trans_id as the relative path.

    This is suitable as a fallback, and when the transform should not be
    sensitive to the path encoding of the limbo directory.
    """
    # Files placed at a trans_id-based path cannot be created directly in
    # their final parent, so they will need a rename during apply().
    self._needs_rename.add(trans_id)
    return pathjoin(self._limbodir, trans_id)
1287
def adjust_path(self, name, parent, trans_id):
1288
previous_parent = self._new_parent.get(trans_id)
1289
previous_name = self._new_name.get(trans_id)
1290
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1291
if (trans_id in self._limbo_files
1292
and trans_id not in self._needs_rename):
1293
self._rename_in_limbo([trans_id])
1294
if previous_parent != parent:
1295
self._limbo_children[previous_parent].remove(trans_id)
1296
if previous_parent != parent or previous_name != name:
1297
del self._limbo_children_names[previous_parent][previous_name]
1299
def _rename_in_limbo(self, trans_ids):
1300
"""Fix limbo names so that the right final path is produced.
1302
This means we outsmarted ourselves-- we tried to avoid renaming
1303
these files later by creating them with their final names in their
1304
final parents. But now the previous name or parent is no longer
1305
suitable, so we have to rename them.
1307
Even for trans_ids that have no new contents, we must remove their
1308
entries from _limbo_files, because they are now stale.
1310
for trans_id in trans_ids:
1311
old_path = self._limbo_files[trans_id]
1312
self._possibly_stale_limbo_files.add(old_path)
1313
del self._limbo_files[trans_id]
1314
if trans_id not in self._new_contents:
1316
new_path = self._limbo_name(trans_id)
1317
os.rename(old_path, new_path)
1318
self._possibly_stale_limbo_files.remove(old_path)
1319
for descendant in self._limbo_descendants(trans_id):
1320
desc_path = self._limbo_files[descendant]
1321
desc_path = new_path + desc_path[len(old_path):]
1322
self._limbo_files[descendant] = desc_path
1324
def _limbo_descendants(self, trans_id):
1325
"""Return the set of trans_ids whose limbo paths descend from this."""
1326
descendants = set(self._limbo_children.get(trans_id, []))
1327
for descendant in list(descendants):
1328
descendants.update(self._limbo_descendants(descendant))
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
    """Schedule creation of a new file.

    See also new_file.

    :param contents: an iterator of strings, all of which will be written
        to the target destination.
    :param trans_id: TreeTransform handle
    :param mode_id: If not None, force the mode of the target file to match
        the mode of the object referenced by mode_id.
        Otherwise, we will try to preserve mode bits of an existing file.
    :param sha1: If the sha1 of this content is already known, pass it in.
        We can use it to prevent future sha1 computations.
    """
    name = self._limbo_name(trans_id)
    with open(name, 'wb') as f:
        # Register before writing so a partially-written file is still
        # tracked (and cleaned up) by the transform.
        unique_add(self._new_contents, trans_id, 'file')
        f.writelines(contents)
    self._set_mtime(name)
    self._set_mode(trans_id, mode_id, S_ISREG)
    # It is unfortunate we have to use lstat instead of fstat, but we just
    # used utime and chmod on the file, so we need the accurate final
    # value.
    if sha1 is not None:
        self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
def _read_symlink_target(self, trans_id):
    """Return the target of the limbo symlink scheduled for trans_id."""
    return os.readlink(self._limbo_name(trans_id))
def _set_mtime(self, path):
    """All files that are created get the same mtime.

    This time is set by the first object to be created.
    """
    if self._creation_mtime is None:
        self._creation_mtime = time.time()
    os.utime(path, (self._creation_mtime, self._creation_mtime))
def create_hardlink(self, path, trans_id):
    """Schedule creation of a hard link.

    :param path: The existing file to link to.
    :param trans_id: TreeTransform handle for the new link.
    :raises errors.HardLinkNotSupported: if the filesystem refuses the
        link with EPERM.
    """
    name = self._limbo_name(trans_id)
    try:
        os.link(path, name)
    except OSError as e:
        if e.errno != errno.EPERM:
            raise
        raise errors.HardLinkNotSupported(path)
    try:
        unique_add(self._new_contents, trans_id, 'file')
    except BaseException:
        # Clean up the file, it never got registered so
        # TreeTransform.finalize() won't clean it up.
        os.unlink(name)
        raise
def create_directory(self, trans_id):
    """Schedule creation of a new directory.

    See also new_directory.
    """
    os.mkdir(self._limbo_name(trans_id))
    unique_add(self._new_contents, trans_id, 'directory')
def create_symlink(self, target, trans_id):
    """Schedule creation of a new symbolic link.

    target is a bytestring.
    See also new_symlink.
    """
    if self._create_symlinks:
        os.symlink(target, self._limbo_name(trans_id))
    else:
        try:
            path = FinalPaths(self).get_path(trans_id)
        except KeyError:
            path = None
        trace.warning(
            'Unable to create symlink "%s" on this filesystem.' % (path,))
    # We add symlink to _new_contents even if they are unsupported
    # and not created. These entries are subsequently used to avoid
    # conflicts on platforms that don't support symlink
    unique_add(self._new_contents, trans_id, 'symlink')
def cancel_creation(self, trans_id):
    """Cancel the creation of new file contents."""
    del self._new_contents[trans_id]
    if trans_id in self._observed_sha1s:
        del self._observed_sha1s[trans_id]
    children = self._limbo_children.get(trans_id)
    # if this is a limbo directory with children, move them before removing
    # the directory
    if children is not None:
        self._rename_in_limbo(children)
        del self._limbo_children[trans_id]
        del self._limbo_children_names[trans_id]
    delete_any(self._limbo_name(trans_id))
def new_orphan(self, trans_id, parent_id):
    """See TreeTransformBase.new_orphan.

    Dispatches to the orphan handler selected by the tree's
    'transform.orphan_policy' configuration option.
    """
    conf = self._tree.get_config_stack()
    handle_orphan = conf.get('transform.orphan_policy')
    handle_orphan(self, trans_id, parent_id)
class OrphaningError(errors.BzrError):
    """Base class for errors raised while orphaning a file."""

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
class OrphaningForbidden(OrphaningError):
    """Raised when the active orphaning policy refuses to create orphans."""

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `brz-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'brz-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
# Registry of orphan-handling policies, keyed by the value of the
# 'transform.orphan_policy' configuration option.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    u'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    u'move', move_orphan,
    'Move orphans into the brz-orphans directory.')
orphaning_registry._set_default_key(u'conflict')
# Configuration option exposing the orphaning policy registry; invalid
# values produce a warning rather than an error.
opt_transform_orphan = _mod_config.RegistryOption(
    'transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
1505
class TreeTransform(DiskTreeTransform):
1506
"""Represent a tree transformation.
1508
This object is designed to support incremental generation of the transform,
1511
However, it gives optimum performance when parent directories are created
1512
before their contents. The transform is then able to put child files
1513
directly in their parent directory, avoiding later renames.
1515
It is easy to produce malformed transforms, but they are generally
1516
harmless. Attempting to apply a malformed transform will cause an
1517
exception to be raised before any modifications are made to the tree.
1519
Many kinds of malformed transforms can be corrected with the
1520
resolve_conflicts function. The remaining ones indicate programming error,
1521
such as trying to create a file with no path.
1523
Two sets of file creation methods are supplied. Convenience methods are:
1528
These are composed of the low-level methods:
1530
* create_file or create_directory or create_symlink
1534
Transform/Transaction ids
1535
-------------------------
1536
trans_ids are temporary ids assigned to all files involved in a transform.
1537
It's possible, even common, that not all files in the Tree have trans_ids.
1539
trans_ids are used because filenames and file_ids are not good enough
1540
identifiers; filenames change, and not all files have file_ids. File-ids
1541
are also associated with trans-ids, so that moving a file moves its
1544
trans_ids are only valid for the TreeTransform that generated them.
1548
Limbo is a temporary directory use to hold new versions of files.
1549
Files are added to limbo by create_file, create_directory, create_symlink,
1550
and their convenience variants (new_*). Files may be removed from limbo
1551
using cancel_creation. Files are renamed from limbo into their final
1552
location as part of TreeTransform.apply
1554
Limbo must be cleaned up, by either calling TreeTransform.apply or
1555
calling TreeTransform.finalize.
1557
Files are placed into limbo inside their parent directories, where
1558
possible. This reduces subsequent renames, and makes operations involving
1559
lots of files faster. This optimization is only possible if the parent
1560
directory is created *before* creating any of its children, so avoid
1561
creating children before parents, where possible.
1565
This temporary directory is used by _FileMover for storing files that are
1566
about to be deleted. In case of rollback, the files will be restored.
1567
FileMover does not delete files until it is sure that a rollback will not
1571
def __init__(self, tree, pb=None):
1572
"""Note: a tree_write lock is taken on the tree.
1574
Use TreeTransform.finalize() to release the lock (can be omitted if
1575
TreeTransform.apply() called).
1577
tree.lock_tree_write()
1579
limbodir = urlutils.local_path_from_url(
1580
tree._transport.abspath('limbo'))
1581
osutils.ensure_empty_directory_exists(
1583
errors.ExistingLimbo)
1584
deletiondir = urlutils.local_path_from_url(
1585
tree._transport.abspath('pending-deletion'))
1586
osutils.ensure_empty_directory_exists(
1588
errors.ExistingPendingDeletion)
1589
except BaseException:
1593
# Cache of realpath results, to speed up canonical_path
1594
self._realpaths = {}
1595
# Cache of relpath results, to speed up canonical_path
1597
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1598
tree.case_sensitive)
1599
self._deletiondir = deletiondir
1601
def canonical_path(self, path):
1602
"""Get the canonical tree-relative path"""
1603
# don't follow final symlinks
1604
abs = self._tree.abspath(path)
1605
if abs in self._relpaths:
1606
return self._relpaths[abs]
1607
dirname, basename = os.path.split(abs)
1608
if dirname not in self._realpaths:
1609
self._realpaths[dirname] = os.path.realpath(dirname)
1610
dirname = self._realpaths[dirname]
1611
abs = pathjoin(dirname, basename)
1612
if dirname in self._relpaths:
1613
relpath = pathjoin(self._relpaths[dirname], basename)
1614
relpath = relpath.rstrip('/\\')
1616
relpath = self._tree.relpath(abs)
1617
self._relpaths[abs] = relpath
1620
def tree_kind(self, trans_id):
1621
"""Determine the file kind in the working tree.
1623
:returns: The file kind or None if the file does not exist
1625
path = self._tree_id_paths.get(trans_id)
1629
return file_kind(self._tree.abspath(path))
1630
except errors.NoSuchFile:
1633
def _set_mode(self, trans_id, mode_id, typefunc):
1634
"""Set the mode of new file contents.
1635
The mode_id is the existing file to get the mode from (often the same
1636
as trans_id). The operation is only performed if there's a mode match
1637
according to typefunc.
1642
old_path = self._tree_id_paths[mode_id]
1646
mode = os.stat(self._tree.abspath(old_path)).st_mode
1647
except OSError as e:
1648
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1649
# Either old_path doesn't exist, or the parent of the
1650
# target is not a directory (but will be one eventually)
1651
# Either way, we know it doesn't exist *right now*
1652
# See also bug #248448
1657
osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1659
def iter_tree_children(self, parent_id):
1660
"""Iterate through the entry's tree children, if any"""
1662
path = self._tree_id_paths[parent_id]
1666
children = os.listdir(self._tree.abspath(path))
1667
except OSError as e:
1668
if not (osutils._is_error_enotdir(e) or
1669
e.errno in (errno.ENOENT, errno.ESRCH)):
1673
for child in children:
1674
childpath = joinpath(path, child)
1675
if self._tree.is_control_filename(childpath):
1677
yield self.trans_id_tree_path(childpath)
1679
def _generate_limbo_path(self, trans_id):
1680
"""Generate a limbo path using the final path if possible.
1682
This optimizes the performance of applying the tree transform by
1683
avoiding renames. These renames can be avoided only when the parent
1684
directory is already scheduled for creation.
1686
If the final path cannot be used, falls back to using the trans_id as
1689
parent = self._new_parent.get(trans_id)
1690
# if the parent directory is already in limbo (e.g. when building a
1691
# tree), choose a limbo name inside the parent, to reduce further
1693
use_direct_path = False
1694
if self._new_contents.get(parent) == 'directory':
1695
filename = self._new_name.get(trans_id)
1696
if filename is not None:
1697
if parent not in self._limbo_children:
1698
self._limbo_children[parent] = set()
1699
self._limbo_children_names[parent] = {}
1700
use_direct_path = True
1701
# the direct path can only be used if no other file has
1702
# already taken this pathname, i.e. if the name is unused, or
1703
# if it is already associated with this trans_id.
1704
elif self._case_sensitive_target:
1705
if (self._limbo_children_names[parent].get(filename)
1706
in (trans_id, None)):
1707
use_direct_path = True
1709
for l_filename, l_trans_id in viewitems(
1710
self._limbo_children_names[parent]):
1711
if l_trans_id == trans_id:
1713
if l_filename.lower() == filename.lower():
1716
use_direct_path = True
1718
if not use_direct_path:
1719
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1721
limbo_name = pathjoin(self._limbo_files[parent], filename)
1722
self._limbo_children[parent].add(trans_id)
1723
self._limbo_children_names[parent][filename] = trans_id
1726
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1727
"""Apply all changes to the inventory and filesystem.
1729
If filesystem or inventory conflicts are present, MalformedTransform
1732
If apply succeeds, finalize is not necessary.
1734
:param no_conflicts: if True, the caller guarantees there are no
1735
conflicts, so no check is made.
1736
:param precomputed_delta: An inventory delta to use instead of
1738
:param _mover: Supply an alternate FileMover, for testing
1740
for hook in MutableTree.hooks['pre_transform']:
1741
hook(self._tree, self)
1742
if not no_conflicts:
1743
self._check_malformed()
1744
with ui.ui_factory.nested_progress_bar() as child_pb:
1745
if precomputed_delta is None:
1746
child_pb.update(gettext('Apply phase'), 0, 2)
1747
inventory_delta = self._generate_inventory_delta()
1750
inventory_delta = precomputed_delta
1753
mover = _FileMover()
1757
child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1758
self._apply_removals(mover)
1759
child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1760
modified_paths = self._apply_insertions(mover)
1761
except BaseException:
1765
mover.apply_deletions()
1766
if self.final_file_id(self.root) is None:
1767
inventory_delta = [e for e in inventory_delta if e[0] != '']
1768
self._tree.apply_inventory_delta(inventory_delta)
1769
self._apply_observed_sha1s()
1772
return _TransformResults(modified_paths, self.rename_count)
1774
def _generate_inventory_delta(self):
1775
"""Generate an inventory delta for the current transform."""
1776
inventory_delta = []
1777
new_paths = self._inventory_altered()
1778
total_entries = len(new_paths) + len(self._removed_id)
1779
with ui.ui_factory.nested_progress_bar() as child_pb:
1780
for num, trans_id in enumerate(self._removed_id):
1782
child_pb.update(gettext('removing file'),
1784
if trans_id == self._new_root:
1785
file_id = self._tree.get_root_id()
1787
file_id = self.tree_file_id(trans_id)
1788
# File-id isn't really being deleted, just moved
1789
if file_id in self._r_new_id:
1791
path = self._tree_id_paths[trans_id]
1792
inventory_delta.append((path, None, file_id, None))
1793
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1795
for num, (path, trans_id) in enumerate(new_paths):
1797
child_pb.update(gettext('adding file'),
1798
num + len(self._removed_id), total_entries)
1799
file_id = new_path_file_ids[trans_id]
1802
kind = self.final_kind(trans_id)
1804
kind = self._tree.stored_kind(self._tree.id2path(file_id))
1805
parent_trans_id = self.final_parent(trans_id)
1806
parent_file_id = new_path_file_ids.get(parent_trans_id)
1807
if parent_file_id is None:
1808
parent_file_id = self.final_file_id(parent_trans_id)
1809
if trans_id in self._new_reference_revision:
1810
new_entry = inventory.TreeReference(
1812
self._new_name[trans_id],
1813
self.final_file_id(self._new_parent[trans_id]),
1814
None, self._new_reference_revision[trans_id])
1816
new_entry = inventory.make_entry(kind,
1817
self.final_name(trans_id),
1818
parent_file_id, file_id)
1820
old_path = self._tree.id2path(new_entry.file_id)
1821
except errors.NoSuchId:
1823
new_executability = self._new_executability.get(trans_id)
1824
if new_executability is not None:
1825
new_entry.executable = new_executability
1826
inventory_delta.append(
1827
(old_path, path, new_entry.file_id, new_entry))
1828
return inventory_delta
1830
def _apply_removals(self, mover):
1831
"""Perform tree operations that remove directory/inventory names.
1833
That is, delete files that are to be deleted, and put any files that
1834
need renaming into limbo. This must be done in strict child-to-parent
1837
If inventory_delta is None, no inventory delta generation is performed.
1839
tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
1840
with ui.ui_factory.nested_progress_bar() as child_pb:
1841
for num, (path, trans_id) in enumerate(tree_paths):
1842
# do not attempt to move root into a subdirectory of itself.
1845
child_pb.update(gettext('removing file'), num, len(tree_paths))
1846
full_path = self._tree.abspath(path)
1847
if trans_id in self._removed_contents:
1848
delete_path = os.path.join(self._deletiondir, trans_id)
1849
mover.pre_delete(full_path, delete_path)
1850
elif (trans_id in self._new_name or
1851
trans_id in self._new_parent):
1853
mover.rename(full_path, self._limbo_name(trans_id))
1854
except errors.TransformRenameFailed as e:
1855
if e.errno != errno.ENOENT:
1858
self.rename_count += 1
1860
def _apply_insertions(self, mover):
1861
"""Perform tree operations that insert directory/inventory names.
1863
That is, create any files that need to be created, and restore from
1864
limbo any files that needed renaming. This must be done in strict
1865
parent-to-child order.
1867
If inventory_delta is None, no inventory delta is calculated, and
1868
no list of modified paths is returned.
1870
new_paths = self.new_paths(filesystem_only=True)
1872
with ui.ui_factory.nested_progress_bar() as child_pb:
1873
for num, (path, trans_id) in enumerate(new_paths):
1875
child_pb.update(gettext('adding file'),
1876
num, len(new_paths))
1877
full_path = self._tree.abspath(path)
1878
if trans_id in self._needs_rename:
1880
mover.rename(self._limbo_name(trans_id), full_path)
1881
except errors.TransformRenameFailed as e:
1882
# We may be renaming a dangling inventory id
1883
if e.errno != errno.ENOENT:
1886
self.rename_count += 1
1887
# TODO: if trans_id in self._observed_sha1s, we should
1888
# re-stat the final target, since ctime will be
1889
# updated by the change.
1890
if (trans_id in self._new_contents
1891
or self.path_changed(trans_id)):
1892
if trans_id in self._new_contents:
1893
modified_paths.append(full_path)
1894
if trans_id in self._new_executability:
1895
self._set_executability(path, trans_id)
1896
if trans_id in self._observed_sha1s:
1897
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1898
st = osutils.lstat(full_path)
1899
self._observed_sha1s[trans_id] = (o_sha1, st)
1900
for path, trans_id in new_paths:
1901
# new_paths includes stuff like workingtree conflicts. Only the
1902
# stuff in new_contents actually comes from limbo.
1903
if trans_id in self._limbo_files:
1904
del self._limbo_files[trans_id]
1905
self._new_contents.clear()
1906
return modified_paths
1908
def _apply_observed_sha1s(self):
1909
"""After we have finished renaming everything, update observed sha1s
1911
This has to be done after self._tree.apply_inventory_delta, otherwise
1912
it doesn't know anything about the files we are updating. Also, we want
1913
to do this as late as possible, so that most entries end up cached.
1915
# TODO: this doesn't update the stat information for directories. So
1916
# the first 'bzr status' will still need to rewrite
1917
# .bzr/checkout/dirstate. However, we at least don't need to
1918
# re-read all of the files.
1919
# TODO: If the operation took a while, we could do a time.sleep(3) here
1920
# to allow the clock to tick over and ensure we won't have any
1921
# problems. (we could observe start time, and finish time, and if
1922
# it is less than eg 10% overhead, add a sleep call.)
1923
paths = FinalPaths(self)
1924
for trans_id, observed in viewitems(self._observed_sha1s):
1925
path = paths.get_path(trans_id)
1926
self._tree._observed_sha1(path, observed)
1929
class TransformPreview(DiskTreeTransform):
1930
"""A TreeTransform for generating preview trees.
1932
Unlike TreeTransform, this version works when the input tree is a
1933
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1934
unversioned files in the input tree.
1937
def __init__(self, tree, pb=None, case_sensitive=True):
1939
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1940
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1942
def canonical_path(self, path):
1945
def tree_kind(self, trans_id):
1946
path = self._tree_id_paths.get(trans_id)
1949
kind = self._tree.path_content_summary(path)[0]
1950
if kind == 'missing':
1954
def _set_mode(self, trans_id, mode_id, typefunc):
1955
"""Set the mode of new file contents.
1956
The mode_id is the existing file to get the mode from (often the same
1957
as trans_id). The operation is only performed if there's a mode match
1958
according to typefunc.
1960
# is it ok to ignore this? probably
1963
def iter_tree_children(self, parent_id):
1964
"""Iterate through the entry's tree children, if any"""
1966
path = self._tree_id_paths[parent_id]
1970
entry = next(self._tree.iter_entries_by_dir(
1971
specific_files=[path]))[1]
1972
except StopIteration:
1974
children = getattr(entry, 'children', {})
1975
for child in children:
1976
childpath = joinpath(path, child)
1977
yield self.trans_id_tree_path(childpath)
1979
def new_orphan(self, trans_id, parent_id):
1980
raise NotImplementedError(self.new_orphan)
1983
class _PreviewTree(inventorytree.InventoryTree):
1984
"""Partial implementation of Tree to support show_diff_trees"""
1986
def __init__(self, transform):
1987
self._transform = transform
1988
self._final_paths = FinalPaths(transform)
1989
self.__by_parent = None
1990
self._parent_ids = []
1991
self._all_children_cache = {}
1992
self._path2trans_id_cache = {}
1993
self._final_name_cache = {}
1994
self._iter_changes_cache = dict((c[0], c) for c in
1995
self._transform.iter_changes())
1997
def _content_change(self, file_id):
1998
"""Return True if the content of this file changed"""
1999
changes = self._iter_changes_cache.get(file_id)
2000
# changes[2] is true if the file content changed. See
2001
# InterTree.iter_changes.
2002
return (changes is not None and changes[2])
2004
def _get_repository(self):
2005
repo = getattr(self._transform._tree, '_repository', None)
2007
repo = self._transform._tree.branch.repository
2010
def _iter_parent_trees(self):
2011
for revision_id in self.get_parent_ids():
2013
yield self.revision_tree(revision_id)
2014
except errors.NoSuchRevisionInTree:
2015
yield self._get_repository().revision_tree(revision_id)
2017
def _get_file_revision(self, path, file_id, vf, tree_revision):
2019
(file_id, t.get_file_revision(t.id2path(file_id)))
2020
for t in self._iter_parent_trees()]
2021
vf.add_lines((file_id, tree_revision), parent_keys,
2022
self.get_file_lines(path))
2023
repo = self._get_repository()
2024
base_vf = repo.texts
2025
if base_vf not in vf.fallback_versionedfiles:
2026
vf.fallback_versionedfiles.append(base_vf)
2027
return tree_revision
2029
def _stat_limbo_file(self, trans_id):
2030
name = self._transform._limbo_name(trans_id)
2031
return os.lstat(name)
2034
def _by_parent(self):
2035
if self.__by_parent is None:
2036
self.__by_parent = self._transform.by_parent()
2037
return self.__by_parent
2039
def _comparison_data(self, entry, path):
2040
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2041
if kind == 'missing':
2045
file_id = self._transform.final_file_id(self._path2trans_id(path))
2046
executable = self.is_executable(path)
2047
return kind, executable, None
2049
def is_locked(self):
2052
def lock_read(self):
2053
# Perhaps in theory, this should lock the TreeTransform?
2054
return lock.LogicalLockResult(self.unlock)
2060
def root_inventory(self):
2061
"""This Tree does not use inventory as its backing data."""
2062
raise NotImplementedError(_PreviewTree.root_inventory)
2064
def get_root_id(self):
2065
return self._transform.final_file_id(self._transform.root)
2067
def all_file_ids(self):
2068
tree_ids = set(self._transform._tree.all_file_ids())
2069
tree_ids.difference_update(self._transform.tree_file_id(t)
2070
for t in self._transform._removed_id)
2071
tree_ids.update(viewvalues(self._transform._new_id))
2074
def all_versioned_paths(self):
2075
tree_paths = set(self._transform._tree.all_versioned_paths())
2077
tree_paths.difference_update(
2078
self._transform.trans_id_tree_path(t)
2079
for t in self._transform._removed_id)
2082
self._final_paths._determine_path(t)
2083
for t in self._transform._new_id)
2087
def _has_id(self, file_id, fallback_check):
2088
if file_id in self._transform._r_new_id:
2090
elif file_id in {self._transform.tree_file_id(trans_id) for
2091
trans_id in self._transform._removed_id}:
2094
return fallback_check(file_id)
2096
def has_id(self, file_id):
2097
return self._has_id(file_id, self._transform._tree.has_id)
2099
def has_or_had_id(self, file_id):
2100
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2102
def _path2trans_id(self, path):
2103
# We must not use None here, because that is a valid value to store.
2104
trans_id = self._path2trans_id_cache.get(path, object)
2105
if trans_id is not object:
2107
segments = splitpath(path)
2108
cur_parent = self._transform.root
2109
for cur_segment in segments:
2110
for child in self._all_children(cur_parent):
2111
final_name = self._final_name_cache.get(child)
2112
if final_name is None:
2113
final_name = self._transform.final_name(child)
2114
self._final_name_cache[child] = final_name
2115
if final_name == cur_segment:
2119
self._path2trans_id_cache[path] = None
2121
self._path2trans_id_cache[path] = cur_parent
2124
def path2id(self, path):
2125
if isinstance(path, list):
2128
path = osutils.pathjoin(*path)
2129
return self._transform.final_file_id(self._path2trans_id(path))
2131
def id2path(self, file_id):
2132
trans_id = self._transform.trans_id_file_id(file_id)
2134
return self._final_paths._determine_path(trans_id)
2136
raise errors.NoSuchId(self, file_id)
2138
def _all_children(self, trans_id):
2139
children = self._all_children_cache.get(trans_id)
2140
if children is not None:
2142
children = set(self._transform.iter_tree_children(trans_id))
2143
# children in the _new_parent set are provided by _by_parent.
2144
children.difference_update(self._transform._new_parent)
2145
children.update(self._by_parent.get(trans_id, []))
2146
self._all_children_cache[trans_id] = children
2149
def _iter_children(self, file_id):
2150
trans_id = self._transform.trans_id_file_id(file_id)
2151
for child_trans_id in self._all_children(trans_id):
2152
yield self._transform.final_file_id(child_trans_id)
2155
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2156
in self._transform._tree.extras())
2157
possible_extras.update(self._transform._new_contents)
2158
possible_extras.update(self._transform._removed_id)
2159
for trans_id in possible_extras:
2160
if self._transform.final_file_id(trans_id) is None:
2161
yield self._final_paths._determine_path(trans_id)
2163
def _make_inv_entries(self, ordered_entries, specific_files=None):
2164
for trans_id, parent_file_id in ordered_entries:
2165
file_id = self._transform.final_file_id(trans_id)
2168
if (specific_files is not None
2169
and self._final_paths.get_path(trans_id) not in specific_files):
2171
kind = self._transform.final_kind(trans_id)
2173
kind = self._transform._tree.stored_kind(
2174
self._transform._tree.id2path(file_id))
2175
new_entry = inventory.make_entry(
2177
self._transform.final_name(trans_id),
2178
parent_file_id, file_id)
2179
yield new_entry, trans_id
2181
def _list_files_by_dir(self):
2182
todo = [ROOT_PARENT]
2184
while len(todo) > 0:
2186
parent_file_id = self._transform.final_file_id(parent)
2187
children = list(self._all_children(parent))
2188
paths = dict(zip(children, self._final_paths.get_paths(children)))
2189
children.sort(key=paths.get)
2190
todo.extend(reversed(children))
2191
for trans_id in children:
2192
ordered_ids.append((trans_id, parent_file_id))
2195
def iter_child_entries(self, path):
2196
trans_id = self._path2trans_id(path)
2197
if trans_id is None:
2198
raise errors.NoSuchFile(path)
2199
todo = [(child_trans_id, trans_id) for child_trans_id in
2200
self._all_children(trans_id)]
2201
for entry, trans_id in self._make_inv_entries(todo):
2204
def iter_entries_by_dir(self, specific_files=None):
2205
# This may not be a maximally efficient implementation, but it is
2206
# reasonably straightforward. An implementation that grafts the
2207
# TreeTransform changes onto the tree's iter_entries_by_dir results
2208
# might be more efficient, but requires tricky inferences about stack
2210
ordered_ids = self._list_files_by_dir()
2211
for entry, trans_id in self._make_inv_entries(ordered_ids,
2213
yield self._final_paths.get_path(trans_id), entry
2215
def _iter_entries_for_dir(self, dir_path):
2216
"""Return path, entry for items in a directory without recursing down."""
2218
dir_trans_id = self._path2trans_id(dir_path)
2219
dir_id = self._transform.final_file_id(dir_trans_id)
2220
for child_trans_id in self._all_children(dir_trans_id):
2221
ordered_ids.append((child_trans_id, dir_id))
2223
for entry, trans_id in self._make_inv_entries(ordered_ids):
2224
path_entries.append((self._final_paths.get_path(trans_id), entry))
2228
def list_files(self, include_root=False, from_dir=None, recursive=True):
2229
"""See WorkingTree.list_files."""
2230
# XXX This should behave like WorkingTree.list_files, but is really
2231
# more like RevisionTree.list_files.
2237
prefix = from_dir + '/'
2238
entries = self.iter_entries_by_dir()
2239
for path, entry in entries:
2240
if entry.name == '' and not include_root:
2243
if not path.startswith(prefix):
2245
path = path[len(prefix):]
2246
yield path, 'V', entry.kind, entry
2248
if from_dir is None and include_root is True:
2249
root_entry = inventory.make_entry(
2250
'directory', '', ROOT_PARENT, self.get_root_id())
2251
yield '', 'V', 'directory', root_entry
2252
entries = self._iter_entries_for_dir(from_dir or '')
2253
for path, entry in entries:
2254
yield path, 'V', entry.kind, entry
2256
def kind(self, path):
2257
trans_id = self._path2trans_id(path)
2258
if trans_id is None:
2259
raise errors.NoSuchFile(path)
2260
return self._transform.final_kind(trans_id)
2262
def stored_kind(self, path):
2263
trans_id = self._path2trans_id(path)
2264
if trans_id is None:
2265
raise errors.NoSuchFile(path)
2267
return self._transform._new_contents[trans_id]
2269
return self._transform._tree.stored_kind(path)
2271
def get_file_mtime(self, path):
2272
"""See Tree.get_file_mtime"""
2273
file_id = self.path2id(path)
2275
raise errors.NoSuchFile(path)
2276
if not self._content_change(file_id):
2277
return self._transform._tree.get_file_mtime(
2278
self._transform._tree.id2path(file_id))
2279
trans_id = self._path2trans_id(path)
2280
return self._stat_limbo_file(trans_id).st_mtime
2282
def get_file_size(self, path):
2283
"""See Tree.get_file_size"""
2284
trans_id = self._path2trans_id(path)
2285
if trans_id is None:
2286
raise errors.NoSuchFile(path)
2287
kind = self._transform.final_kind(trans_id)
2290
if trans_id in self._transform._new_contents:
2291
return self._stat_limbo_file(trans_id).st_size
2292
if self.kind(path) == 'file':
2293
return self._transform._tree.get_file_size(path)
2297
def get_file_verifier(self, path, stat_value=None):
2298
trans_id = self._path2trans_id(path)
2299
if trans_id is None:
2300
raise errors.NoSuchFile(path)
2301
kind = self._transform._new_contents.get(trans_id)
2303
return self._transform._tree.get_file_verifier(path)
2305
with self.get_file(path) as fileobj:
2306
return ("SHA1", sha_file(fileobj))
2308
def get_file_sha1(self, path, stat_value=None):
2309
trans_id = self._path2trans_id(path)
2310
if trans_id is None:
2311
raise errors.NoSuchFile(path)
2312
kind = self._transform._new_contents.get(trans_id)
2314
return self._transform._tree.get_file_sha1(path)
2316
with self.get_file(path) as fileobj:
2317
return sha_file(fileobj)
2319
def is_executable(self, path):
2320
trans_id = self._path2trans_id(path)
2321
if trans_id is None:
2324
return self._transform._new_executability[trans_id]
2327
return self._transform._tree.is_executable(path)
2328
except OSError as e:
2329
if e.errno == errno.ENOENT:
2332
except errors.NoSuchFile:
2335
def has_filename(self, path):
2336
trans_id = self._path2trans_id(path)
2337
if trans_id in self._transform._new_contents:
2339
elif trans_id in self._transform._removed_contents:
2342
return self._transform._tree.has_filename(path)
2344
def path_content_summary(self, path):
2345
trans_id = self._path2trans_id(path)
2346
tt = self._transform
2347
tree_path = tt._tree_id_paths.get(trans_id)
2348
kind = tt._new_contents.get(trans_id)
2350
if tree_path is None or trans_id in tt._removed_contents:
2351
return 'missing', None, None, None
2352
summary = tt._tree.path_content_summary(tree_path)
2353
kind, size, executable, link_or_sha1 = summary
2356
limbo_name = tt._limbo_name(trans_id)
2357
if trans_id in tt._new_reference_revision:
2358
kind = 'tree-reference'
2360
statval = os.lstat(limbo_name)
2361
size = statval.st_size
2362
if not tt._limbo_supports_executable():
2365
executable = statval.st_mode & S_IEXEC
2369
if kind == 'symlink':
2370
link_or_sha1 = os.readlink(limbo_name)
2371
if not isinstance(link_or_sha1, text_type):
2372
link_or_sha1 = link_or_sha1.decode(osutils._fs_enc)
2373
executable = tt._new_executability.get(trans_id, executable)
2374
return kind, size, executable, link_or_sha1
2376
def iter_changes(self, from_tree, include_unchanged=False,
2377
specific_files=None, pb=None, extra_trees=None,
2378
require_versioned=True, want_unversioned=False):
2379
"""See InterTree.iter_changes.
2381
This has a fast path that is only used when the from_tree matches
2382
the transform tree, and no fancy options are supplied.
2384
if (from_tree is not self._transform._tree or include_unchanged
2385
or specific_files or want_unversioned):
2386
return tree.InterTree(from_tree, self).iter_changes(
2387
include_unchanged=include_unchanged,
2388
specific_files=specific_files,
2390
extra_trees=extra_trees,
2391
require_versioned=require_versioned,
2392
want_unversioned=want_unversioned)
2393
if want_unversioned:
2394
raise ValueError('want_unversioned is not supported')
2395
return self._transform.iter_changes()
2397
def get_file(self, path):
2398
"""See Tree.get_file"""
2399
file_id = self.path2id(path)
2400
if not self._content_change(file_id):
2401
return self._transform._tree.get_file(path)
2402
trans_id = self._path2trans_id(path)
2403
name = self._transform._limbo_name(trans_id)
2404
return open(name, 'rb')
2406
def get_file_with_stat(self, path):
2407
return self.get_file(path), None
2409
def annotate_iter(self, path,
2410
default_revision=_mod_revision.CURRENT_REVISION):
2411
file_id = self.path2id(path)
2412
changes = self._iter_changes_cache.get(file_id)
2416
changed_content, versioned, kind = (changes[2], changes[3],
2420
get_old = (kind[0] == 'file' and versioned[0])
2422
old_annotation = self._transform._tree.annotate_iter(
2423
path, default_revision=default_revision)
2427
return old_annotation
2428
if not changed_content:
2429
return old_annotation
2430
# TODO: This is doing something similar to what WT.annotate_iter is
2431
# doing, however it fails slightly because it doesn't know what
2432
# the *other* revision_id is, so it doesn't know how to give the
2433
# other as the origin for some lines, they all get
2434
# 'default_revision'
2435
# It would be nice to be able to use the new Annotator based
2436
# approach, as well.
2437
return annotate.reannotate([old_annotation],
2438
self.get_file(path).readlines(),
2441
def get_symlink_target(self, path):
2442
"""See Tree.get_symlink_target"""
2443
file_id = self.path2id(path)
2444
if not self._content_change(file_id):
2445
return self._transform._tree.get_symlink_target(path)
2446
trans_id = self._path2trans_id(path)
2447
name = self._transform._limbo_name(trans_id)
2448
return osutils.readlink(name)
2450
def walkdirs(self, prefix=''):
2451
pending = [self._transform.root]
2452
while len(pending) > 0:
2453
parent_id = pending.pop()
2456
prefix = prefix.rstrip('/')
2457
parent_path = self._final_paths.get_path(parent_id)
2458
parent_file_id = self._transform.final_file_id(parent_id)
2459
for child_id in self._all_children(parent_id):
2460
path_from_root = self._final_paths.get_path(child_id)
2461
basename = self._transform.final_name(child_id)
2462
file_id = self._transform.final_file_id(child_id)
2463
kind = self._transform.final_kind(child_id)
2464
if kind is not None:
2465
versioned_kind = kind
2468
versioned_kind = self._transform._tree.stored_kind(
2469
self._transform._tree.id2path(file_id))
2470
if versioned_kind == 'directory':
2471
subdirs.append(child_id)
2472
children.append((path_from_root, basename, kind, None,
2473
file_id, versioned_kind))
2475
if parent_path.startswith(prefix):
2476
yield (parent_path, parent_file_id), children
2477
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2480
def get_parent_ids(self):
2481
return self._parent_ids
2483
def set_parent_ids(self, parent_ids):
2484
self._parent_ids = parent_ids
2486
def get_revision_tree(self, revision_id):
2487
return self._transform._tree.get_revision_tree(revision_id)
864
2490
def joinpath(parent, child):
865
2491
"""Join tree-relative paths, handling the tree root specially"""
866
2492
if parent is None or parent == "":
896
2523
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2524
return self._known_paths[trans_id]
2526
def get_paths(self, trans_ids):
2527
return [(self.get_path(t), t) for t in trans_ids]
899
2530
def topology_sorted_ids(tree):
900
2531
"""Determine the topological order of the ids in a tree"""
901
2532
file_ids = list(tree)
902
2533
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2537
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2538
delta_from_tree=False):
2539
"""Create working tree for a branch, using a TreeTransform.
2541
This function should be used on empty trees, having a tree root at most.
2542
(see merge and revert functionality for working with existing trees)
2544
Existing files are handled like so:
2546
- Existing bzrdirs take precedence over creating new items. They are
2547
created as '%s.diverted' % name.
2548
- Otherwise, if the content on disk matches the content we are building,
2549
it is silently replaced.
2550
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2552
:param tree: The tree to convert wt into a copy of
2553
:param wt: The working tree that files will be placed into
2554
:param accelerator_tree: A tree which can be used for retrieving file
2555
contents more quickly than tree itself, i.e. a workingtree. tree
2556
will be used for cases where accelerator_tree's content is different.
2557
:param hardlink: If true, hard-link files to accelerator_tree, where
2558
possible. accelerator_tree must implement abspath, i.e. be a
2560
:param delta_from_tree: If true, build_tree may use the input Tree to
2561
generate the inventory delta.
2563
with wt.lock_tree_write(), tree.lock_read():
2564
if accelerator_tree is not None:
2565
accelerator_tree.lock_read()
2567
return _build_tree(tree, wt, accelerator_tree, hardlink,
2570
if accelerator_tree is not None:
2571
accelerator_tree.unlock()
2574
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2575
"""See build_tree."""
2576
for num, _unused in enumerate(wt.all_versioned_paths()):
2577
if num > 0: # more than just a root
2578
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2579
file_trans_id = {}
908
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2580
top_pb = ui.ui_factory.nested_progress_bar()
909
2581
pp = ProgressPhase("Build phase", 2, top_pb)
2582
if tree.get_root_id() is not None:
2583
# This is kind of a hack: we should be altering the root
2584
# as part of the regular tree shape diff logic.
2585
# The conditional test here is to avoid doing an
2586
# expensive operation (flush) every time the root id
2587
# is set within the tree, nor setting the root and thus
2588
# marking the tree as dirty, because we use two different
2589
# idioms here: tree interfaces and inventory interfaces.
2590
if wt.get_root_id() != tree.get_root_id():
2591
wt.set_root_id(tree.get_root_id())
910
2593
tt = TreeTransform(wt)
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
915
pb = bzrlib.ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2597
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_path('')
2598
with ui.ui_factory.nested_progress_bar() as pb:
2599
deferred_contents = []
2601
total = len(tree.all_versioned_paths())
2603
precomputed_delta = []
2605
precomputed_delta = None
2606
# Check if tree inventory has content. If so, we populate
2607
# existing_files with the directory content. If there are no
2608
# entries we skip populating existing_files as its not used.
2609
# This improves performance and unncessary work on large
2610
# directory trees. (#501307)
2612
existing_files = set()
2613
for dir, files in wt.walkdirs():
2614
existing_files.update(f[0] for f in files)
2615
for num, (tree_path, entry) in \
2616
enumerate(tree.iter_entries_by_dir()):
2617
pb.update(gettext("Building tree"), num
2618
- len(deferred_contents), total)
920
2619
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2622
file_id = entry.file_id
2624
precomputed_delta.append((None, tree_path, file_id, entry))
2625
if tree_path in existing_files:
2626
target_path = wt.abspath(tree_path)
2627
kind = file_kind(target_path)
2628
if kind == "directory":
2630
controldir.ControlDir.open(target_path)
2631
except errors.NotBranchError:
2635
if (file_id not in divert
2637
tree, entry, tree_path, file_id, kind,
2639
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2640
if kind == 'directory':
924
2642
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2643
if entry.kind == 'file':
2644
# We *almost* replicate new_by_entry, so that we can defer
2645
# getting the file text, and get them all at once.
2646
trans_id = tt.create_path(entry.name, parent_id)
2647
file_trans_id[file_id] = trans_id
2648
tt.version_file(file_id, trans_id)
2649
executable = tree.is_executable(tree_path)
2651
tt.set_executability(executable, trans_id)
2652
trans_data = (trans_id, file_id,
2653
tree_path, entry.text_sha1)
2654
deferred_contents.append((tree_path, trans_data))
2656
file_trans_id[file_id] = new_by_entry(
2657
tree_path, tt, entry, parent_id, tree)
2659
new_trans_id = file_trans_id[file_id]
2660
old_parent = tt.trans_id_tree_path(tree_path)
2661
_reparent_children(tt, old_parent, new_trans_id)
2662
offset = num + 1 - len(deferred_contents)
2663
_create_files(tt, tree, deferred_contents, pb, offset,
2664
accelerator_tree, hardlink)
2666
divert_trans = set(file_trans_id[f] for f in divert)
2669
return resolve_checkout(t, c, divert_trans)
2670
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2671
if len(raw_conflicts) > 0:
2672
precomputed_delta = None
2673
conflicts = cook_conflicts(raw_conflicts, tt)
2674
for conflict in conflicts:
2675
trace.warning(text_type(conflict))
2677
wt.add_conflicts(conflicts)
2678
except errors.UnsupportedOperation:
2680
result = tt.apply(no_conflicts=True,
2681
precomputed_delta=precomputed_delta)
933
2684
top_pb.finished()
935
def new_by_entry(tt, entry, parent_id, tree):
2688
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2690
total = len(desired_files) + offset
2692
if accelerator_tree is None:
2693
new_desired_files = desired_files
2695
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2696
unchanged = [(p[0], p[1]) for (f, p, c, v, d, n, k, e)
2697
in iter if not (c or e[0] != e[1])]
2698
if accelerator_tree.supports_content_filtering():
2699
unchanged = [(tp, ap) for (tp, ap) in unchanged
2700
if not next(accelerator_tree.iter_search_rules([ap]))]
2701
unchanged = dict(unchanged)
2702
new_desired_files = []
2704
for unused_tree_path, (trans_id, file_id, tree_path, text_sha1) in desired_files:
2705
accelerator_path = unchanged.get(tree_path)
2706
if accelerator_path is None:
2707
new_desired_files.append((tree_path,
2708
(trans_id, file_id, tree_path, text_sha1)))
2710
pb.update(gettext('Adding file contents'), count + offset, total)
2712
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2715
with accelerator_tree.get_file(accelerator_path) as f:
2716
chunks = osutils.file_iterator(f)
2717
if wt.supports_content_filtering():
2718
filters = wt._content_filter_stack(tree_path)
2719
chunks = filtered_output_bytes(chunks, filters,
2720
ContentFilterContext(tree_path, tree))
2721
tt.create_file(chunks, trans_id, sha1=text_sha1)
2724
for count, ((trans_id, file_id, tree_path, text_sha1), contents) in enumerate(
2725
tree.iter_files_bytes(new_desired_files)):
2726
if wt.supports_content_filtering():
2727
filters = wt._content_filter_stack(tree_path)
2728
contents = filtered_output_bytes(contents, filters,
2729
ContentFilterContext(tree_path, tree))
2730
tt.create_file(contents, trans_id, sha1=text_sha1)
2731
pb.update(gettext('Adding file contents'), count + offset, total)
2734
def _reparent_children(tt, old_parent, new_parent):
2735
for child in tt.iter_tree_children(old_parent):
2736
tt.adjust_path(tt.final_name(child), new_parent, child)
2739
def _reparent_transform_children(tt, old_parent, new_parent):
2740
by_parent = tt.by_parent()
2741
for child in by_parent[old_parent]:
2742
tt.adjust_path(tt.final_name(child), new_parent, child)
2743
return by_parent[old_parent]
2746
def _content_match(tree, entry, tree_path, file_id, kind, target_path):
2747
if entry.kind != kind:
2749
if entry.kind == "directory":
2751
if entry.kind == "file":
2752
with open(target_path, 'rb') as f1, \
2753
tree.get_file(tree_path) as f2:
2754
if osutils.compare_files(f1, f2):
2756
elif entry.kind == "symlink":
2757
if tree.get_symlink_target(tree_path) == os.readlink(target_path):
2762
def resolve_checkout(tt, conflicts, divert):
    """Resolve 'duplicate' conflicts arising from a checkout.

    :param tt: The transform containing the conflicts.
    :param conflicts: Iterable of conflict tuples; every one must be a
        'duplicate' conflict, anything else is a programming error.
    :param divert: Set of trans_ids whose new content should be diverted
        aside rather than displacing the existing file.
    :return: A set of cooked conflict tuples describing what was done.
    """
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]
        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file) + '.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file) + '.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts
2792
def new_by_entry(path, tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry.

    :param path: Path of the entry inside ``tree``.
    :param tt: The transform to add the new item to.
    :param entry: The inventory entry describing name/kind/file_id.
    :param parent_id: trans_id of the parent directory.
    :param tree: Tree supplying file contents and symlink targets.
    :return: The trans_id of the newly created item.
    :raises errors.BadFileKindError: for unsupported entry kinds.
    """
    name = entry.name
    kind = entry.kind
    if kind == 'file':
        with tree.get_file(path) as f:
            executable = tree.is_executable(path)
            return tt.new_file(
                name, parent_id, osutils.file_iterator(f), entry.file_id,
                executable)
    elif kind in ('directory', 'tree-reference'):
        trans_id = tt.new_directory(name, parent_id, entry.file_id)
        if kind == 'tree-reference':
            tt.set_tree_reference(entry.reference_revision, trans_id)
        return trans_id
    elif kind == 'symlink':
        target = tree.get_symlink_target(path)
        return tt.new_symlink(name, parent_id, target, entry.file_id)
    else:
        raise errors.BadFileKindError(name, kind)
950
def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
    """Create new file contents according to an inventory entry.

    :param tt: The transform to create contents in.
    :param entry: Inventory entry describing kind and file_id.
    :param tree: Tree supplying contents when ``lines`` is not given.
    :param trans_id: trans_id to attach the new contents to.
    :param lines: Optional pre-fetched file lines (files only).
    :param mode_id: Optional trans_id whose unix mode the new file copies.
    """
    if entry.kind == "file":
        if lines is None:
            lines = tree.get_file(entry.file_id).readlines()
        tt.create_file(lines, trans_id, mode_id=mode_id)
    elif entry.kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
    elif entry.kind == "directory":
        tt.create_directory(trans_id)
2814
def create_from_tree(tt, trans_id, tree, path, file_id=None, chunks=None,
                     filter_tree_path=None):
    """Create new file contents according to tree contents.

    :param tt: The transform to create contents in.
    :param trans_id: trans_id to attach the new contents to.
    :param tree: Tree supplying kind, contents, and symlink targets.
    :param path: Path of the item inside ``tree``.
    :param file_id: Unused; retained for backward API compatibility.
    :param chunks: Optional pre-fetched content chunks (files only).
    :param filter_tree_path: the tree path to use to lookup
      content filters to apply to the bytes output in the working tree.
      This only applies if the working tree supports content filtering.
    :raises AssertionError: for unknown kinds.
    """
    kind = tree.kind(path)
    if kind == 'directory':
        tt.create_directory(trans_id)
    elif kind == "file":
        if chunks is None:
            f = tree.get_file(path)
            chunks = osutils.file_iterator(f)
        else:
            f = None
        try:
            # NOTE(review): the working tree appears to come from the
            # transform itself here — confirm tt._tree is the right source.
            wt = tt._tree
            if wt.supports_content_filtering() and filter_tree_path is not None:
                filters = wt._content_filter_stack(filter_tree_path)
                chunks = filtered_output_bytes(
                    chunks, filters,
                    ContentFilterContext(filter_tree_path, tree))
            tt.create_file(chunks, trans_id)
        finally:
            if f is not None:
                f.close()
    elif kind == "symlink":
        tt.create_symlink(tree.get_symlink_target(path), trans_id)
    else:
        raise AssertionError('Unknown kind %r' % kind)
961
2848
def create_entry_executability(tt, entry, trans_id):
    """Set the executability of a trans_id according to an inventory entry"""
    # Only regular files carry an executable bit.
    if entry.kind == "file":
        tt.set_executability(entry.executable, trans_id)
967
def find_interesting(working_tree, target_tree, filenames):
    """Find the ids corresponding to specified filenames.

    :param working_tree: Tree searched first for each path.
    :param target_tree: Tree searched second for each path.
    :param filenames: Paths to look up; empty/None means "everything".
    :return: None when no filenames were given (meaning all ids are
        interesting), otherwise the set of matching file ids.
    :raises NotVersionedError: when a path is versioned in neither tree.
    """
    if not filenames:
        interesting_ids = None
    else:
        interesting_ids = set()
        for tree_path in filenames:
            not_found = True
            for tree in (working_tree, target_tree):
                file_id = tree.inventory.path2id(tree_path)
                if file_id is not None:
                    interesting_ids.add(file_id)
                    not_found = False
            if not_found:
                raise NotVersionedError(path=tree_path)
    return interesting_ids
985
def change_entry(tt, file_id, working_tree, target_tree,
                 trans_id_file_id, backups, trans_id, by_parent):
    """Replace a file_id's contents with those from a target tree.

    :param tt: The transform to modify.
    :param file_id: File id whose contents are being replaced.
    :param working_tree: The tree currently holding the file.
    :param target_tree: The tree supplying the replacement entry.
    :param trans_id_file_id: Callable mapping a file_id to a trans_id.
    :param backups: When True, back the old contents up instead of deleting.
    :param trans_id: Dict of file_id -> trans_id, updated for backups.
    :param by_parent: Mapping of parent trans_id -> children, for backup
        name selection.
    """
    e_trans_id = trans_id_file_id(file_id)
    entry = target_tree.inventory[file_id]
    has_contents, contents_mod, meta_mod, = _entry_changes(file_id, entry,
                                                           working_tree)
    if contents_mod:
        # New contents inherit the unix mode of the old trans_id.
        mode_id = e_trans_id
        if has_contents:
            if not backups:
                tt.delete_contents(e_trans_id)
            else:
                # Move the old version aside under a backup name, then
                # create a fresh path for the new contents.
                parent_trans_id = trans_id_file_id(entry.parent_id)
                backup_name = get_backup_name(entry, by_parent,
                                              parent_trans_id, tt)
                tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
                tt.unversion_file(e_trans_id)
                e_trans_id = tt.create_path(entry.name, parent_trans_id)
                tt.version_file(file_id, e_trans_id)
                trans_id[file_id] = e_trans_id
        create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
        create_entry_executability(tt, entry, e_trans_id)
    elif meta_mod:
        tt.set_executability(entry.executable, e_trans_id)
    # Rename/move the item if its final name or parent disagrees with the
    # target entry.
    if tt.final_name(e_trans_id) != entry.name:
        adjust_path = True
    else:
        parent_id = tt.final_parent(e_trans_id)
        parent_file_id = tt.final_file_id(parent_id)
        adjust_path = (parent_file_id != entry.parent_id)
    if adjust_path:
        parent_trans_id = trans_id_file_id(entry.parent_id)
        tt.adjust_path(entry.name, parent_trans_id, e_trans_id)
1025
def get_backup_name(entry, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        # Yield entry.name.~1~, entry.name.~2~, ... indefinitely.
        counter = 1
        while True:
            yield "%s.~%d~" % (entry.name, counter)
            counter += 1
    for name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, name):
            return name
1036
def _entry_changes(file_id, entry, working_tree):
1037
"""Determine in which ways the inventory entry has changed.
1039
Returns booleans: has_contents, content_mod, meta_mod
1040
has_contents means there are currently contents, but they differ
1041
contents_mod means contents need to be modified
1042
meta_mod means the metadata needs to be modified
1044
cur_entry = working_tree.inventory[file_id]
1046
working_kind = working_tree.kind(file_id)
1049
if e.errno != errno.ENOENT:
1051
has_contents = False
1054
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
1059
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
cur_entry._forget_tree_state()
1065
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree.

    :param working_tree: The tree to modify.
    :param target_tree: The tree whose state is being restored.
    :param filenames: Paths limiting the revert, or empty for all.
    :param backups: Back up modified files instead of discarding them.
    :param pb: Deprecated; a progress bar is created internally.
    :param change_reporter: When supplied, report the changes made.
    :return: The conflicts encountered while reverting.
    """
    pb = ui.ui_factory.nested_progress_bar()
    try:
        with target_tree.lock_read(), TreeTransform(working_tree, pb) as tt:
            pp = ProgressPhase("Revert phase", 3, pb)
            conflicts, merge_modified = _prepare_revert_transform(
                working_tree, target_tree, tt, filenames, backups, pp)
            if change_reporter:
                change_reporter = delta._ChangeReporter(
                    unversioned_filter=working_tree.is_ignored)
                delta.report_changes(tt.iter_changes(), change_reporter)
            for conflict in conflicts:
                trace.warning(text_type(conflict))
            pp.next_phase()
            tt.apply(no_conflicts=True)
            if working_tree.supports_merge_modified():
                working_tree.set_merge_modified(merge_modified)
    finally:
        pb.clear()
    return conflicts
2878
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    """Populate a transform with the changes needed for a revert.

    :param working_tree: The tree being reverted.
    :param target_tree: The tree being reverted to.
    :param tt: The (empty) transform to fill in.
    :param filenames: Paths limiting the revert.
    :param backups: Back up modified files instead of discarding them.
    :param pp: Progress phase tracker (currently unused here).
    :param basis_tree: Optional pre-acquired basis tree.
    :param merge_modified: Optional pre-fetched merge-modified map.
    :return: Tuple of (cooked conflicts, updated merge_modified map).
    """
    with ui.ui_factory.nested_progress_bar() as child_pb:
        if merge_modified is None:
            merge_modified = working_tree.merge_modified()
        merge_modified = _alter_files(working_tree, target_tree, tt,
                                      child_pb, filenames, backups,
                                      merge_modified, basis_tree)
    with ui.ui_factory.nested_progress_bar() as child_pb:
        raw_conflicts = resolve_conflicts(
            tt, child_pb, lambda t, c: conflict_pass(t, c, target_tree))
    conflicts = cook_conflicts(raw_conflicts, tt)
    return conflicts, merge_modified
2894
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
                 backups, merge_modified, basis_tree=None):
    """Fill a transform with the file alterations for a revert.

    :param working_tree: The tree being reverted.
    :param target_tree: The tree being reverted to.
    :param tt: The transform to fill in.
    :param pb: Progress bar for the change iteration.
    :param specific_files: Paths limiting the revert.
    :param backups: Back up modified files instead of discarding them.
    :param merge_modified: Map of file_id -> sha1, updated in place.
    :param basis_tree: Optional pre-acquired (read-locked) basis tree.
    :return: The updated merge_modified map.
    """
    if basis_tree is not None:
        basis_tree.lock_read()
    # We ask the working_tree for its changes relative to the target, rather
    # than the target changes relative to the working tree. Because WT4 has an
    # optimizer to compare itself to a target, but no optimizer for the
    # reverse.
    change_list = working_tree.iter_changes(
        target_tree, specific_files=specific_files, pb=pb)
    if not target_tree.is_versioned(u''):
        skip_root = True
    else:
        skip_root = False
    try:
        deferred_files = []
        for id_num, (file_id, path, changed_content, versioned, parent, name,
                     kind, executable) in enumerate(change_list):
            target_path, wt_path = path
            target_versioned, wt_versioned = versioned
            target_parent, wt_parent = parent
            target_name, wt_name = name
            target_kind, wt_kind = kind
            target_executable, wt_executable = executable
            if skip_root and wt_parent is None:
                continue
            trans_id = tt.trans_id_file_id(file_id)
            mode_id = None
            if changed_content:
                keep_content = False
                if wt_kind == 'file' and (backups or target_kind is None):
                    wt_sha1 = working_tree.get_file_sha1(wt_path)
                    if merge_modified.get(file_id) != wt_sha1:
                        # acquire the basis tree lazily to prevent the
                        # expense of accessing it when it's not needed ?
                        # (Guessing, RBC, 200702)
                        if basis_tree is None:
                            basis_tree = working_tree.basis_tree()
                            basis_tree.lock_read()
                        basis_path = find_previous_path(
                            working_tree, basis_tree, wt_path)
                        if basis_path is None:
                            if target_kind is None and not target_versioned:
                                keep_content = True
                        else:
                            if wt_sha1 != basis_tree.get_file_sha1(basis_path):
                                keep_content = True
                if wt_kind is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif target_kind is not None:
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
                        backup_name = tt._available_backup_name(
                            wt_name, parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
                        if wt_versioned and target_versioned:
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # contents
                        mode_id = trans_id
                        trans_id = new_trans_id
                if target_kind in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if target_kind == 'tree-reference':
                        revision = target_tree.get_reference_revision(
                            target_path)
                        tt.set_tree_reference(revision, trans_id)
                elif target_kind == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(
                        target_path), trans_id)
                elif target_kind == 'file':
                    deferred_files.append(
                        (target_path, (trans_id, mode_id, file_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(target_path)
                    basis_path = find_previous_path(
                        target_tree, basis_tree, target_path)
                    if (basis_path is not None and
                            new_sha1 == basis_tree.get_file_sha1(basis_path)):
                        # Reverting back to basis: drop the merge marker.
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1
                    # preserve the execute bit when backing up
                    if keep_content and wt_executable == target_executable:
                        tt.set_executability(target_executable, trans_id)
                elif target_kind is not None:
                    raise AssertionError(target_kind)
            if not wt_versioned and target_versioned:
                tt.version_file(file_id, trans_id)
            if wt_versioned and not target_versioned:
                tt.unversion_file(trans_id)
            if (target_name is not None
                    and (wt_name != target_name
                         or wt_parent != target_parent)):
                if target_name == '' and target_parent is None:
                    parent_trans = ROOT_PARENT
                else:
                    parent_trans = tt.trans_id_file_id(target_parent)
                if wt_parent is None and wt_versioned:
                    tt.adjust_root_path(target_name, parent_trans)
                else:
                    tt.adjust_path(target_name, parent_trans, trans_id)
            if wt_executable != target_executable and target_kind == "file":
                tt.set_executability(target_executable, trans_id)
        if working_tree.supports_content_filtering():
            for (trans_id, mode_id, file_id), bytes in (
                    target_tree.iter_files_bytes(deferred_files)):
                # We're reverting a tree to the target tree so using the
                # target tree to find the file path seems the best choice
                # here IMO - Ian C 27/Oct/2009
                filter_tree_path = target_tree.id2path(file_id)
                filters = working_tree._content_filter_stack(filter_tree_path)
                bytes = filtered_output_bytes(
                    bytes, filters,
                    ContentFilterContext(filter_tree_path, working_tree))
                tt.create_file(bytes, trans_id, mode_id)
        else:
            for (trans_id, mode_id, file_id), bytes in (
                    target_tree.iter_files_bytes(deferred_files)):
                tt.create_file(bytes, trans_id, mode_id)
        tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified
3025
def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail

    :param tt: The transform whose conflicts are to be resolved.
    :param pb: Deprecated; a progress bar is created internally.
    :param pass_func: Callable taking (tt, conflicts) and returning the
        conflicts it could not resolve; defaults to conflict_pass.
    :return: The set of conflicts produced while resolving.
    :raises MalformedTransform: when conflicts remain after 10 passes.
    """
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    with ui.ui_factory.nested_progress_bar() as pb:
        for n in range(10):
            pb.update(gettext('Resolution pass'), n + 1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
3040
def conflict_pass(tt, conflicts, path_tree=None):
3041
"""Resolve some classes of conflicts.
3043
:param tt: The transform to resolve conflicts in
3044
:param conflicts: The conflicts to resolve
3045
:param path_tree: A Tree to get supplemental paths from
1160
3047
new_conflicts = set()
1161
3048
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
3049
if c_type == 'duplicate id':