861
831
self.create_symlink(target, trans_id)
834
def new_orphan(self, trans_id, parent_id):
    """Schedule an item to be orphaned.

    When a directory is about to be removed, its children, if they are not
    versioned are moved out of the way: they don't have a parent anymore.

    :param trans_id: The trans_id of the existing item.
    :param parent_id: The parent trans_id of the item.
    """
    # Abstract on the base class; concrete transforms supply the policy.
    raise NotImplementedError(self.new_orphan)
845
def _get_potential_orphans(self, dir_id):
846
"""Find the potential orphans in a directory.
848
A directory can't be safely deleted if there are versioned files in it.
849
If all the contained files are unversioned then they can be orphaned.
851
The 'None' return value means that the directory contains at least one
852
versioned file and should not be deleted.
854
:param dir_id: The directory trans id.
856
:return: A list of the orphan trans ids or None if at least one
857
versioned file is present.
860
# Find the potential orphans, stop if one item should be kept
861
for child_tid in self.by_parent()[dir_id]:
862
if child_tid in self._removed_contents:
863
# The child is removed as part of the transform. Since it was
864
# versioned before, it's not an orphan
866
elif self.final_file_id(child_tid) is None:
867
# The child is not versioned
868
orphans.append(child_tid)
870
# We have a versioned file here, searching for orphans is
876
def _affected_ids(self):
877
"""Return the set of transform ids affected by the transform"""
878
trans_ids = set(self._removed_id)
879
trans_ids.update(self._new_id)
880
trans_ids.update(self._removed_contents)
881
trans_ids.update(self._new_contents)
882
trans_ids.update(self._new_executability)
883
trans_ids.update(self._new_name)
884
trans_ids.update(self._new_parent)
887
def _get_file_id_maps(self):
888
"""Return mapping of file_ids to trans_ids in the to and from states"""
889
trans_ids = self._affected_ids()
892
# Build up two dicts: trans_ids associated with file ids in the
893
# FROM state, vs the TO state.
894
for trans_id in trans_ids:
895
from_file_id = self.tree_file_id(trans_id)
896
if from_file_id is not None:
897
from_trans_ids[from_file_id] = trans_id
898
to_file_id = self.final_file_id(trans_id)
899
if to_file_id is not None:
900
to_trans_ids[to_file_id] = trans_id
901
return from_trans_ids, to_trans_ids
903
def _from_file_data(self, from_trans_id, from_versioned, from_path):
904
"""Get data about a file in the from (tree) state
906
Return a (name, parent, kind, executable) tuple
908
from_path = self._tree_id_paths.get(from_trans_id)
910
# get data from working tree if versioned
911
from_entry = next(self._tree.iter_entries_by_dir(
912
specific_files=[from_path]))[1]
913
from_name = from_entry.name
914
from_parent = from_entry.parent_id
917
if from_path is None:
918
# File does not exist in FROM state
922
# File exists, but is not versioned. Have to use path-
924
from_name = os.path.basename(from_path)
925
tree_parent = self.get_tree_parent(from_trans_id)
926
from_parent = self.tree_file_id(tree_parent)
927
if from_path is not None:
928
from_kind, from_executable, from_stats = \
929
self._tree._comparison_data(from_entry, from_path)
932
from_executable = False
933
return from_name, from_parent, from_kind, from_executable
935
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
936
"""Get data about a file in the to (target) state
938
Return a (name, parent, kind, executable) tuple
940
to_name = self.final_name(to_trans_id)
941
to_kind = self.final_kind(to_trans_id)
942
to_parent = self.final_file_id(self.final_parent(to_trans_id))
943
if to_trans_id in self._new_executability:
944
to_executable = self._new_executability[to_trans_id]
945
elif to_trans_id == from_trans_id:
946
to_executable = from_executable
948
to_executable = False
949
return to_name, to_parent, to_kind, to_executable
951
def iter_changes(self):
952
"""Produce output in the same format as Tree.iter_changes.
954
Will produce nonsensical results if invoked while inventory/filesystem
955
conflicts (as reported by TreeTransform.find_conflicts()) are present.
957
This reads the Transform, but only reproduces changes involving a
958
file_id. Files that are not versioned in either of the FROM or TO
959
states are not reflected.
961
final_paths = FinalPaths(self)
962
from_trans_ids, to_trans_ids = self._get_file_id_maps()
964
# Now iterate through all active file_ids
965
for file_id in set(from_trans_ids).union(to_trans_ids):
967
from_trans_id = from_trans_ids.get(file_id)
968
# find file ids, and determine versioning state
969
if from_trans_id is None:
970
from_versioned = False
971
from_trans_id = to_trans_ids[file_id]
973
from_versioned = True
974
to_trans_id = to_trans_ids.get(file_id)
975
if to_trans_id is None:
977
to_trans_id = from_trans_id
981
if not from_versioned:
984
from_path = self._tree_id_paths.get(from_trans_id)
988
to_path = final_paths.get_path(to_trans_id)
990
from_name, from_parent, from_kind, from_executable = \
991
self._from_file_data(from_trans_id, from_versioned, from_path)
993
to_name, to_parent, to_kind, to_executable = \
994
self._to_file_data(to_trans_id, from_trans_id, from_executable)
996
if from_kind != to_kind:
998
elif to_kind in ('file', 'symlink') and (
999
to_trans_id != from_trans_id
1000
or to_trans_id in self._new_contents):
1002
if (not modified and from_versioned == to_versioned
1003
and from_parent == to_parent and from_name == to_name
1004
and from_executable == to_executable):
1008
file_id, (from_path, to_path), modified,
1009
(from_versioned, to_versioned),
1010
(from_parent, to_parent),
1011
(from_name, to_name),
1012
(from_kind, to_kind),
1013
(from_executable, to_executable)))
1016
return (c.path[0] or '', c.path[1] or '')
1017
return iter(sorted(results, key=path_key))
1019
def get_preview_tree(self):
1020
"""Return a tree representing the result of the transform.
1022
The tree is a snapshot, and altering the TreeTransform will invalidate
1025
return _PreviewTree(self)
1027
def commit(self, branch, message, merge_parents=None, strict=False,
1028
timestamp=None, timezone=None, committer=None, authors=None,
1029
revprops=None, revision_id=None):
1030
"""Commit the result of this TreeTransform to a branch.
1032
:param branch: The branch to commit to.
1033
:param message: The message to attach to the commit.
1034
:param merge_parents: Additional parent revision-ids specified by
1036
:param strict: If True, abort the commit if there are unversioned
1038
:param timestamp: if not None, seconds-since-epoch for the time and
1039
date. (May be a float.)
1040
:param timezone: Optional timezone for timestamp, as an offset in
1042
:param committer: Optional committer in email-id format.
1043
(e.g. "J Random Hacker <jrandom@example.com>")
1044
:param authors: Optional list of authors in email-id format.
1045
:param revprops: Optional dictionary of revision properties.
1046
:param revision_id: Optional revision id. (Specifying a revision-id
1047
may reduce performance for some non-native formats.)
1048
:return: The revision_id of the revision committed.
1050
self._check_malformed()
1052
unversioned = set(self._new_contents).difference(set(self._new_id))
1053
for trans_id in unversioned:
1054
if self.final_file_id(trans_id) is None:
1055
raise errors.StrictCommitFailed()
1057
revno, last_rev_id = branch.last_revision_info()
1058
if last_rev_id == _mod_revision.NULL_REVISION:
1059
if merge_parents is not None:
1060
raise ValueError('Cannot supply merge parents for first'
1064
parent_ids = [last_rev_id]
1065
if merge_parents is not None:
1066
parent_ids.extend(merge_parents)
1067
if self._tree.get_revision_id() != last_rev_id:
1068
raise ValueError('TreeTransform not based on branch basis: %s' %
1069
self._tree.get_revision_id().decode('utf-8'))
1070
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1071
builder = branch.get_commit_builder(parent_ids,
1072
timestamp=timestamp,
1074
committer=committer,
1076
revision_id=revision_id)
1077
preview = self.get_preview_tree()
1078
list(builder.record_iter_changes(preview, last_rev_id,
1079
self.iter_changes()))
1080
builder.finish_inventory()
1081
revision_id = builder.commit(message)
1082
branch.set_last_revision_info(revno + 1, revision_id)
1085
def _text_parent(self, trans_id):
1086
path = self.tree_path(trans_id)
1088
if path is None or self._tree.kind(path) != 'file':
1090
except errors.NoSuchFile:
1094
def _get_parents_texts(self, trans_id):
1095
"""Get texts for compression parents of this file."""
1096
path = self._text_parent(trans_id)
1099
return (self._tree.get_file_text(path),)
1101
def _get_parents_lines(self, trans_id):
1102
"""Get lines for compression parents of this file."""
1103
path = self._text_parent(trans_id)
1106
return (self._tree.get_file_lines(path),)
1108
def serialize(self, serializer):
1109
"""Serialize this TreeTransform.
1111
:param serializer: A Serialiser like pack.ContainerSerializer.
1113
new_name = {k.encode('utf-8'): v.encode('utf-8')
1114
for k, v in viewitems(self._new_name)}
1115
new_parent = {k.encode('utf-8'): v.encode('utf-8')
1116
for k, v in viewitems(self._new_parent)}
1117
new_id = {k.encode('utf-8'): v
1118
for k, v in viewitems(self._new_id)}
1119
new_executability = {k.encode('utf-8'): int(v)
1120
for k, v in viewitems(self._new_executability)}
1121
tree_path_ids = {k.encode('utf-8'): v.encode('utf-8')
1122
for k, v in viewitems(self._tree_path_ids)}
1123
non_present_ids = {k: v.encode('utf-8')
1124
for k, v in viewitems(self._non_present_ids)}
1125
removed_contents = [trans_id.encode('utf-8')
1126
for trans_id in self._removed_contents]
1127
removed_id = [trans_id.encode('utf-8')
1128
for trans_id in self._removed_id]
1130
b'_id_number': self._id_number,
1131
b'_new_name': new_name,
1132
b'_new_parent': new_parent,
1133
b'_new_executability': new_executability,
1135
b'_tree_path_ids': tree_path_ids,
1136
b'_removed_id': removed_id,
1137
b'_removed_contents': removed_contents,
1138
b'_non_present_ids': non_present_ids,
1140
yield serializer.bytes_record(bencode.bencode(attribs),
1142
for trans_id, kind in sorted(viewitems(self._new_contents)):
1144
with open(self._limbo_name(trans_id), 'rb') as cur_file:
1145
lines = cur_file.readlines()
1146
parents = self._get_parents_lines(trans_id)
1147
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1148
content = b''.join(mpdiff.to_patch())
1149
if kind == 'directory':
1151
if kind == 'symlink':
1152
content = self._read_symlink_target(trans_id)
1153
if not isinstance(content, bytes):
1154
content = content.encode('utf-8')
1155
yield serializer.bytes_record(
1156
content, ((trans_id.encode('utf-8'), kind.encode('ascii')),))
1158
def deserialize(self, records):
1159
"""Deserialize a stored TreeTransform.
1161
:param records: An iterable of (names, content) tuples, as per
1162
pack.ContainerPushParser.
1164
names, content = next(records)
1165
attribs = bencode.bdecode(content)
1166
self._id_number = attribs[b'_id_number']
1167
self._new_name = {k.decode('utf-8'): v.decode('utf-8')
1168
for k, v in viewitems(attribs[b'_new_name'])}
1169
self._new_parent = {k.decode('utf-8'): v.decode('utf-8')
1170
for k, v in viewitems(attribs[b'_new_parent'])}
1171
self._new_executability = {
1172
k.decode('utf-8'): bool(v)
1173
for k, v in viewitems(attribs[b'_new_executability'])}
1174
self._new_id = {k.decode('utf-8'): v
1175
for k, v in viewitems(attribs[b'_new_id'])}
1176
self._r_new_id = {v: k for k, v in viewitems(self._new_id)}
1177
self._tree_path_ids = {}
1178
self._tree_id_paths = {}
1179
for bytepath, trans_id in viewitems(attribs[b'_tree_path_ids']):
1180
path = bytepath.decode('utf-8')
1181
trans_id = trans_id.decode('utf-8')
1182
self._tree_path_ids[path] = trans_id
1183
self._tree_id_paths[trans_id] = path
1184
self._removed_id = {trans_id.decode('utf-8')
1185
for trans_id in attribs[b'_removed_id']}
1186
self._removed_contents = set(
1187
trans_id.decode('utf-8')
1188
for trans_id in attribs[b'_removed_contents'])
1189
self._non_present_ids = {
1190
k: v.decode('utf-8')
1191
for k, v in viewitems(attribs[b'_non_present_ids'])}
1192
for ((trans_id, kind),), content in records:
1193
trans_id = trans_id.decode('utf-8')
1194
kind = kind.decode('ascii')
1196
mpdiff = multiparent.MultiParent.from_patch(content)
1197
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1198
self.create_file(lines, trans_id)
1199
if kind == 'directory':
1200
self.create_directory(trans_id)
1201
if kind == 'symlink':
1202
self.create_symlink(content.decode('utf-8'), trans_id)
1205
class DiskTreeTransform(TreeTransformBase):
1206
"""Tree transform storing its contents on disk."""
1208
def __init__(self, tree, limbodir, pb=None, case_sensitive=True):
1210
:param tree: The tree that will be transformed, but not necessarily
1212
:param limbodir: A directory where new files can be stored until
1213
they are installed in their proper places
1215
:param case_sensitive: If True, the target of the transform is
1216
case sensitive, not just case preserving.
1218
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1219
self._limbodir = limbodir
1220
self._deletiondir = None
1221
# A mapping of transform ids to their limbo filename
1222
self._limbo_files = {}
1223
self._possibly_stale_limbo_files = set()
1224
# A mapping of transform ids to a set of the transform ids of children
1225
# that their limbo directory has
1226
self._limbo_children = {}
1227
# Map transform ids to maps of child filename to child transform id
1228
self._limbo_children_names = {}
1229
# List of transform ids that need to be renamed from limbo into place
1230
self._needs_rename = set()
1231
self._creation_mtime = None
1232
self._create_symlinks = osutils.supports_symlinks(self._limbodir)
1235
"""Release the working tree lock, if held, clean up limbo dir.
1237
This is required if apply has not been invoked, but can be invoked
1240
if self._tree is None:
1243
limbo_paths = list(viewvalues(self._limbo_files))
1244
limbo_paths.extend(self._possibly_stale_limbo_files)
1245
limbo_paths.sort(reverse=True)
1246
for path in limbo_paths:
1249
except OSError as e:
1250
if e.errno != errno.ENOENT:
1252
# XXX: warn? perhaps we just got interrupted at an
1253
# inconvenient moment, but perhaps files are disappearing
1256
delete_any(self._limbodir)
1258
# We don't especially care *why* the dir is immortal.
1259
raise ImmortalLimbo(self._limbodir)
1261
if self._deletiondir is not None:
1262
delete_any(self._deletiondir)
1264
raise errors.ImmortalPendingDeletion(self._deletiondir)
1266
TreeTransformBase.finalize(self)
1268
def _limbo_supports_executable(self):
    """Check if the limbo path supports the executable bit."""
    return osutils.supports_executable(self._limbodir)
1272
def _limbo_name(self, trans_id):
1273
"""Generate the limbo name of a file"""
1274
limbo_name = self._limbo_files.get(trans_id)
1275
if limbo_name is None:
1276
limbo_name = self._generate_limbo_path(trans_id)
1277
self._limbo_files[trans_id] = limbo_name
1280
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the trans_id as the relative path.

    This is suitable as a fallback, and when the transform should not be
    sensitive to the path encoding of the limbo directory.
    """
    # The file is created at the top of limbo, so it must be renamed into
    # its final location at apply time.
    self._needs_rename.add(trans_id)
    return pathjoin(self._limbodir, trans_id)
1289
def adjust_path(self, name, parent, trans_id):
1290
previous_parent = self._new_parent.get(trans_id)
1291
previous_name = self._new_name.get(trans_id)
1292
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1293
if (trans_id in self._limbo_files
1294
and trans_id not in self._needs_rename):
1295
self._rename_in_limbo([trans_id])
1296
if previous_parent != parent:
1297
self._limbo_children[previous_parent].remove(trans_id)
1298
if previous_parent != parent or previous_name != name:
1299
del self._limbo_children_names[previous_parent][previous_name]
1301
def _rename_in_limbo(self, trans_ids):
1302
"""Fix limbo names so that the right final path is produced.
1304
This means we outsmarted ourselves-- we tried to avoid renaming
1305
these files later by creating them with their final names in their
1306
final parents. But now the previous name or parent is no longer
1307
suitable, so we have to rename them.
1309
Even for trans_ids that have no new contents, we must remove their
1310
entries from _limbo_files, because they are now stale.
1312
for trans_id in trans_ids:
1313
old_path = self._limbo_files[trans_id]
1314
self._possibly_stale_limbo_files.add(old_path)
1315
del self._limbo_files[trans_id]
1316
if trans_id not in self._new_contents:
1318
new_path = self._limbo_name(trans_id)
1319
os.rename(old_path, new_path)
1320
self._possibly_stale_limbo_files.remove(old_path)
1321
for descendant in self._limbo_descendants(trans_id):
1322
desc_path = self._limbo_files[descendant]
1323
desc_path = new_path + desc_path[len(old_path):]
1324
self._limbo_files[descendant] = desc_path
1326
def _limbo_descendants(self, trans_id):
1327
"""Return the set of trans_ids whose limbo paths descend from this."""
1328
descendants = set(self._limbo_children.get(trans_id, []))
1329
for descendant in list(descendants):
1330
descendants.update(self._limbo_descendants(descendant))
1333
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1334
"""Schedule creation of a new file.
1338
:param contents: an iterator of strings, all of which will be written
1339
to the target destination.
1340
:param trans_id: TreeTransform handle
1341
:param mode_id: If not None, force the mode of the target file to match
1342
the mode of the object referenced by mode_id.
1343
Otherwise, we will try to preserve mode bits of an existing file.
1344
:param sha1: If the sha1 of this content is already known, pass it in.
1345
We can use it to prevent future sha1 computations.
1347
name = self._limbo_name(trans_id)
1348
with open(name, 'wb') as f:
1349
unique_add(self._new_contents, trans_id, 'file')
1350
f.writelines(contents)
1351
self._set_mtime(name)
1352
self._set_mode(trans_id, mode_id, S_ISREG)
1353
# It is unfortunate we have to use lstat instead of fstat, but we just
1354
# used utime and chmod on the file, so we need the accurate final
1356
if sha1 is not None:
1357
self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1359
def _read_symlink_target(self, trans_id):
1360
return os.readlink(self._limbo_name(trans_id))
1362
def _set_mtime(self, path):
1363
"""All files that are created get the same mtime.
1365
This time is set by the first object to be created.
1367
if self._creation_mtime is None:
1368
self._creation_mtime = time.time()
1369
os.utime(path, (self._creation_mtime, self._creation_mtime))
1371
def create_hardlink(self, path, trans_id):
    """Schedule creation of a hard link"""
    name = self._limbo_name(trans_id)
    try:
        os.link(path, name)
    except OSError as e:
        if e.errno != errno.EPERM:
            raise
        # EPERM: the filesystem refuses hard links for this file.
        raise errors.HardLinkNotSupported(path)
    try:
        unique_add(self._new_contents, trans_id, 'file')
    except BaseException:
        # Clean up the file, it never got registered so
        # TreeTransform.finalize() won't clean it up.
        os.unlink(name)
        raise
1388
def create_directory(self, trans_id):
    """Schedule creation of a new directory.

    See also new_directory.
    """
    os.mkdir(self._limbo_name(trans_id))
    unique_add(self._new_contents, trans_id, 'directory')
1396
def create_symlink(self, target, trans_id):
    """Schedule creation of a new symbolic link.

    target is a bytestring.
    See also new_symlink.
    """
    if self._create_symlinks:
        os.symlink(target, self._limbo_name(trans_id))
    else:
        try:
            path = FinalPaths(self).get_path(trans_id)
        except KeyError:
            path = None
        trace.warning(
            'Unable to create symlink "%s" on this filesystem.' % (path,))
    # We add symlink to _new_contents even if they are unsupported
    # and not created. These entries are subsequently used to avoid
    # conflicts on platforms that don't support symlink
    unique_add(self._new_contents, trans_id, 'symlink')
1416
def cancel_creation(self, trans_id):
    """Cancel the creation of new file contents."""
    del self._new_contents[trans_id]
    if trans_id in self._observed_sha1s:
        del self._observed_sha1s[trans_id]
    children = self._limbo_children.get(trans_id)
    # if this is a limbo directory with children, move them before removing
    # the directory
    if children is not None:
        self._rename_in_limbo(children)
        del self._limbo_children[trans_id]
        del self._limbo_children_names[trans_id]
    delete_any(self._limbo_name(trans_id))
1430
def new_orphan(self, trans_id, parent_id):
    """See TreeTransformBase.new_orphan.

    Delegates to the orphaning policy configured via
    'transform.orphan_policy'.
    """
    conf = self._tree.get_config_stack()
    handle_orphan = conf.get('transform.orphan_policy')
    handle_orphan(self, trans_id, parent_id)
1436
class OrphaningError(errors.BzrError):
    """An error occurred while orphaning an item."""

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
1448
class OrphaningForbidden(OrphaningError):
    """The active orphaning policy refuses to create orphans."""

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1457
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `brz-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'brz-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1483
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1491
# Registry of the available orphaning policies; 'conflict' is the default.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    u'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    u'move', move_orphan,
    'Move orphans into the brz-orphans directory.')
orphaning_registry._set_default_key(u'conflict')
1501
# Config option backing the orphaning policy used by new_orphan above.
opt_transform_orphan = _mod_config.RegistryOption(
    'transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
1507
class TreeTransform(DiskTreeTransform):
1508
"""Represent a tree transformation.
1510
This object is designed to support incremental generation of the transform,
1513
However, it gives optimum performance when parent directories are created
1514
before their contents. The transform is then able to put child files
1515
directly in their parent directory, avoiding later renames.
1517
It is easy to produce malformed transforms, but they are generally
1518
harmless. Attempting to apply a malformed transform will cause an
1519
exception to be raised before any modifications are made to the tree.
1521
Many kinds of malformed transforms can be corrected with the
1522
resolve_conflicts function. The remaining ones indicate programming error,
1523
such as trying to create a file with no path.
1525
Two sets of file creation methods are supplied. Convenience methods are:
1530
These are composed of the low-level methods:
1532
* create_file or create_directory or create_symlink
1536
Transform/Transaction ids
1537
-------------------------
1538
trans_ids are temporary ids assigned to all files involved in a transform.
1539
It's possible, even common, that not all files in the Tree have trans_ids.
1541
trans_ids are used because filenames and file_ids are not good enough
1542
identifiers; filenames change, and not all files have file_ids. File-ids
1543
are also associated with trans-ids, so that moving a file moves its
1546
trans_ids are only valid for the TreeTransform that generated them.
1550
Limbo is a temporary directory use to hold new versions of files.
1551
Files are added to limbo by create_file, create_directory, create_symlink,
1552
and their convenience variants (new_*). Files may be removed from limbo
1553
using cancel_creation. Files are renamed from limbo into their final
1554
location as part of TreeTransform.apply
1556
Limbo must be cleaned up, by either calling TreeTransform.apply or
1557
calling TreeTransform.finalize.
1559
Files are placed into limbo inside their parent directories, where
1560
possible. This reduces subsequent renames, and makes operations involving
1561
lots of files faster. This optimization is only possible if the parent
1562
directory is created *before* creating any of its children, so avoid
1563
creating children before parents, where possible.
1567
This temporary directory is used by _FileMover for storing files that are
1568
about to be deleted. In case of rollback, the files will be restored.
1569
FileMover does not delete files until it is sure that a rollback will not
1573
def __init__(self, tree, pb=None):
1574
"""Note: a tree_write lock is taken on the tree.
1576
Use TreeTransform.finalize() to release the lock (can be omitted if
1577
TreeTransform.apply() called).
1579
tree.lock_tree_write()
1581
limbodir = urlutils.local_path_from_url(
1582
tree._transport.abspath('limbo'))
1583
osutils.ensure_empty_directory_exists(
1585
errors.ExistingLimbo)
1586
deletiondir = urlutils.local_path_from_url(
1587
tree._transport.abspath('pending-deletion'))
1588
osutils.ensure_empty_directory_exists(
1590
errors.ExistingPendingDeletion)
1591
except BaseException:
1595
# Cache of realpath results, to speed up canonical_path
1596
self._realpaths = {}
1597
# Cache of relpath results, to speed up canonical_path
1599
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1600
tree.case_sensitive)
1601
self._deletiondir = deletiondir
1603
def canonical_path(self, path):
1604
"""Get the canonical tree-relative path"""
1605
# don't follow final symlinks
1606
abs = self._tree.abspath(path)
1607
if abs in self._relpaths:
1608
return self._relpaths[abs]
1609
dirname, basename = os.path.split(abs)
1610
if dirname not in self._realpaths:
1611
self._realpaths[dirname] = os.path.realpath(dirname)
1612
dirname = self._realpaths[dirname]
1613
abs = pathjoin(dirname, basename)
1614
if dirname in self._relpaths:
1615
relpath = pathjoin(self._relpaths[dirname], basename)
1616
relpath = relpath.rstrip('/\\')
1618
relpath = self._tree.relpath(abs)
1619
self._relpaths[abs] = relpath
1622
def tree_kind(self, trans_id):
1623
"""Determine the file kind in the working tree.
1625
:returns: The file kind or None if the file does not exist
1627
path = self._tree_id_paths.get(trans_id)
1631
return file_kind(self._tree.abspath(path))
1632
except errors.NoSuchFile:
1635
def _set_mode(self, trans_id, mode_id, typefunc):
1636
"""Set the mode of new file contents.
1637
The mode_id is the existing file to get the mode from (often the same
1638
as trans_id). The operation is only performed if there's a mode match
1639
according to typefunc.
1644
old_path = self._tree_id_paths[mode_id]
1648
mode = os.stat(self._tree.abspath(old_path)).st_mode
1649
except OSError as e:
1650
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1651
# Either old_path doesn't exist, or the parent of the
1652
# target is not a directory (but will be one eventually)
1653
# Either way, we know it doesn't exist *right now*
1654
# See also bug #248448
1659
osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1661
def iter_tree_children(self, parent_id):
1662
"""Iterate through the entry's tree children, if any"""
1664
path = self._tree_id_paths[parent_id]
1668
children = os.listdir(self._tree.abspath(path))
1669
except OSError as e:
1670
if not (osutils._is_error_enotdir(e) or
1671
e.errno in (errno.ENOENT, errno.ESRCH)):
1675
for child in children:
1676
childpath = joinpath(path, child)
1677
if self._tree.is_control_filename(childpath):
1679
yield self.trans_id_tree_path(childpath)
1681
def _generate_limbo_path(self, trans_id):
1682
"""Generate a limbo path using the final path if possible.
1684
This optimizes the performance of applying the tree transform by
1685
avoiding renames. These renames can be avoided only when the parent
1686
directory is already scheduled for creation.
1688
If the final path cannot be used, falls back to using the trans_id as
1691
parent = self._new_parent.get(trans_id)
1692
# if the parent directory is already in limbo (e.g. when building a
1693
# tree), choose a limbo name inside the parent, to reduce further
1695
use_direct_path = False
1696
if self._new_contents.get(parent) == 'directory':
1697
filename = self._new_name.get(trans_id)
1698
if filename is not None:
1699
if parent not in self._limbo_children:
1700
self._limbo_children[parent] = set()
1701
self._limbo_children_names[parent] = {}
1702
use_direct_path = True
1703
# the direct path can only be used if no other file has
1704
# already taken this pathname, i.e. if the name is unused, or
1705
# if it is already associated with this trans_id.
1706
elif self._case_sensitive_target:
1707
if (self._limbo_children_names[parent].get(filename)
1708
in (trans_id, None)):
1709
use_direct_path = True
1711
for l_filename, l_trans_id in viewitems(
1712
self._limbo_children_names[parent]):
1713
if l_trans_id == trans_id:
1715
if l_filename.lower() == filename.lower():
1718
use_direct_path = True
1720
if not use_direct_path:
1721
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1723
limbo_name = pathjoin(self._limbo_files[parent], filename)
1724
self._limbo_children[parent].add(trans_id)
1725
self._limbo_children_names[parent][filename] = trans_id
1728
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1729
"""Apply all changes to the inventory and filesystem.
1731
If filesystem or inventory conflicts are present, MalformedTransform
1734
If apply succeeds, finalize is not necessary.
1736
:param no_conflicts: if True, the caller guarantees there are no
1737
conflicts, so no check is made.
1738
:param precomputed_delta: An inventory delta to use instead of
1740
:param _mover: Supply an alternate FileMover, for testing
1742
for hook in MutableTree.hooks['pre_transform']:
1743
hook(self._tree, self)
1744
if not no_conflicts:
1745
self._check_malformed()
1746
with ui.ui_factory.nested_progress_bar() as child_pb:
1747
if precomputed_delta is None:
1748
child_pb.update(gettext('Apply phase'), 0, 2)
1749
inventory_delta = self._generate_inventory_delta()
1752
inventory_delta = precomputed_delta
1755
mover = _FileMover()
1759
child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1760
self._apply_removals(mover)
1761
child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1762
modified_paths = self._apply_insertions(mover)
1763
except BaseException:
1767
mover.apply_deletions()
1768
if self.final_file_id(self.root) is None:
1769
inventory_delta = [e for e in inventory_delta if e[0] != '']
1770
self._tree.apply_inventory_delta(inventory_delta)
1771
self._apply_observed_sha1s()
1774
return _TransformResults(modified_paths, self.rename_count)
1776
def _generate_inventory_delta(self):
1777
"""Generate an inventory delta for the current transform."""
1778
inventory_delta = []
1779
new_paths = self._inventory_altered()
1780
total_entries = len(new_paths) + len(self._removed_id)
1781
with ui.ui_factory.nested_progress_bar() as child_pb:
1782
for num, trans_id in enumerate(self._removed_id):
1784
child_pb.update(gettext('removing file'),
1786
if trans_id == self._new_root:
1787
file_id = self._tree.get_root_id()
1789
file_id = self.tree_file_id(trans_id)
1790
# File-id isn't really being deleted, just moved
1791
if file_id in self._r_new_id:
1793
path = self._tree_id_paths[trans_id]
1794
inventory_delta.append((path, None, file_id, None))
1795
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1797
for num, (path, trans_id) in enumerate(new_paths):
1799
child_pb.update(gettext('adding file'),
1800
num + len(self._removed_id), total_entries)
1801
file_id = new_path_file_ids[trans_id]
1804
kind = self.final_kind(trans_id)
1806
kind = self._tree.stored_kind(self._tree.id2path(file_id))
1807
parent_trans_id = self.final_parent(trans_id)
1808
parent_file_id = new_path_file_ids.get(parent_trans_id)
1809
if parent_file_id is None:
1810
parent_file_id = self.final_file_id(parent_trans_id)
1811
if trans_id in self._new_reference_revision:
1812
new_entry = inventory.TreeReference(
1814
self._new_name[trans_id],
1815
self.final_file_id(self._new_parent[trans_id]),
1816
None, self._new_reference_revision[trans_id])
1818
new_entry = inventory.make_entry(kind,
1819
self.final_name(trans_id),
1820
parent_file_id, file_id)
1822
old_path = self._tree.id2path(new_entry.file_id)
1823
except errors.NoSuchId:
1825
new_executability = self._new_executability.get(trans_id)
1826
if new_executability is not None:
1827
new_entry.executable = new_executability
1828
inventory_delta.append(
1829
(old_path, path, new_entry.file_id, new_entry))
1830
return inventory_delta
1832
def _apply_removals(self, mover):
1833
"""Perform tree operations that remove directory/inventory names.
1835
That is, delete files that are to be deleted, and put any files that
1836
need renaming into limbo. This must be done in strict child-to-parent
1839
If inventory_delta is None, no inventory delta generation is performed.
1841
tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
1842
with ui.ui_factory.nested_progress_bar() as child_pb:
1843
for num, (path, trans_id) in enumerate(tree_paths):
1844
# do not attempt to move root into a subdirectory of itself.
1847
child_pb.update(gettext('removing file'), num, len(tree_paths))
1848
full_path = self._tree.abspath(path)
1849
if trans_id in self._removed_contents:
1850
delete_path = os.path.join(self._deletiondir, trans_id)
1851
mover.pre_delete(full_path, delete_path)
1852
elif (trans_id in self._new_name or
1853
trans_id in self._new_parent):
1855
mover.rename(full_path, self._limbo_name(trans_id))
1856
except errors.TransformRenameFailed as e:
1857
if e.errno != errno.ENOENT:
1860
self.rename_count += 1
1862
def _apply_insertions(self, mover):
1863
"""Perform tree operations that insert directory/inventory names.
1865
That is, create any files that need to be created, and restore from
1866
limbo any files that needed renaming. This must be done in strict
1867
parent-to-child order.
1869
If inventory_delta is None, no inventory delta is calculated, and
1870
no list of modified paths is returned.
1872
new_paths = self.new_paths(filesystem_only=True)
1874
with ui.ui_factory.nested_progress_bar() as child_pb:
1875
for num, (path, trans_id) in enumerate(new_paths):
1877
child_pb.update(gettext('adding file'),
1878
num, len(new_paths))
1879
full_path = self._tree.abspath(path)
1880
if trans_id in self._needs_rename:
1882
mover.rename(self._limbo_name(trans_id), full_path)
1883
except errors.TransformRenameFailed as e:
1884
# We may be renaming a dangling inventory id
1885
if e.errno != errno.ENOENT:
1888
self.rename_count += 1
1889
# TODO: if trans_id in self._observed_sha1s, we should
1890
# re-stat the final target, since ctime will be
1891
# updated by the change.
1892
if (trans_id in self._new_contents
1893
or self.path_changed(trans_id)):
1894
if trans_id in self._new_contents:
1895
modified_paths.append(full_path)
1896
if trans_id in self._new_executability:
1897
self._set_executability(path, trans_id)
1898
if trans_id in self._observed_sha1s:
1899
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1900
st = osutils.lstat(full_path)
1901
self._observed_sha1s[trans_id] = (o_sha1, st)
1902
for path, trans_id in new_paths:
1903
# new_paths includes stuff like workingtree conflicts. Only the
1904
# stuff in new_contents actually comes from limbo.
1905
if trans_id in self._limbo_files:
1906
del self._limbo_files[trans_id]
1907
self._new_contents.clear()
1908
return modified_paths
1910
def _apply_observed_sha1s(self):
1911
"""After we have finished renaming everything, update observed sha1s
1913
This has to be done after self._tree.apply_inventory_delta, otherwise
1914
it doesn't know anything about the files we are updating. Also, we want
1915
to do this as late as possible, so that most entries end up cached.
1917
# TODO: this doesn't update the stat information for directories. So
1918
# the first 'bzr status' will still need to rewrite
1919
# .bzr/checkout/dirstate. However, we at least don't need to
1920
# re-read all of the files.
1921
# TODO: If the operation took a while, we could do a time.sleep(3) here
1922
# to allow the clock to tick over and ensure we won't have any
1923
# problems. (we could observe start time, and finish time, and if
1924
# it is less than eg 10% overhead, add a sleep call.)
1925
paths = FinalPaths(self)
1926
for trans_id, observed in viewitems(self._observed_sha1s):
1927
path = paths.get_path(trans_id)
1928
self._tree._observed_sha1(path, observed)
1931
class TransformPreview(DiskTreeTransform):
1932
"""A TreeTransform for generating preview trees.
1934
Unlike TreeTransform, this version works when the input tree is a
1935
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1936
unversioned files in the input tree.
1939
def __init__(self, tree, pb=None, case_sensitive=True):
1941
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1942
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1944
def canonical_path(self, path):
1947
def tree_kind(self, trans_id):
1948
path = self._tree_id_paths.get(trans_id)
1951
kind = self._tree.path_content_summary(path)[0]
1952
if kind == 'missing':
1956
def _set_mode(self, trans_id, mode_id, typefunc):
1957
"""Set the mode of new file contents.
1958
The mode_id is the existing file to get the mode from (often the same
1959
as trans_id). The operation is only performed if there's a mode match
1960
according to typefunc.
1962
# is it ok to ignore this? probably
1965
def iter_tree_children(self, parent_id):
1966
"""Iterate through the entry's tree children, if any"""
1968
path = self._tree_id_paths[parent_id]
1972
entry = next(self._tree.iter_entries_by_dir(
1973
specific_files=[path]))[1]
1974
except StopIteration:
1976
children = getattr(entry, 'children', {})
1977
for child in children:
1978
childpath = joinpath(path, child)
1979
yield self.trans_id_tree_path(childpath)
1981
def new_orphan(self, trans_id, parent_id):
    """Orphaning is not implemented for preview transforms."""
    raise NotImplementedError(self.new_orphan)
1985
class _PreviewTree(inventorytree.InventoryTree):
1986
"""Partial implementation of Tree to support show_diff_trees"""
1988
def __init__(self, transform):
1989
self._transform = transform
1990
self._final_paths = FinalPaths(transform)
1991
self.__by_parent = None
1992
self._parent_ids = []
1993
self._all_children_cache = {}
1994
self._path2trans_id_cache = {}
1995
self._final_name_cache = {}
1996
self._iter_changes_cache = dict((c.file_id, c) for c in
1997
self._transform.iter_changes())
1999
def _content_change(self, file_id):
    """Return True if the content of this file changed.

    :param file_id: file id to look up in the cached iter_changes results.
    :return: True when a content change was recorded for file_id.
    """
    changes = self._iter_changes_cache.get(file_id)
    # Read the changed_content field by attribute, consistent with how the
    # rest of this class consumes cached change records (annotate_iter reads
    # changes.changed_content / changes.versioned / changes.kind).
    return changes is not None and changes.changed_content
2006
def _get_repository(self):
2007
repo = getattr(self._transform._tree, '_repository', None)
2009
repo = self._transform._tree.branch.repository
2012
def _iter_parent_trees(self):
2013
for revision_id in self.get_parent_ids():
2015
yield self.revision_tree(revision_id)
2016
except errors.NoSuchRevisionInTree:
2017
yield self._get_repository().revision_tree(revision_id)
2019
def _get_file_revision(self, path, file_id, vf, tree_revision):
2021
(file_id, t.get_file_revision(t.id2path(file_id)))
2022
for t in self._iter_parent_trees()]
2023
vf.add_lines((file_id, tree_revision), parent_keys,
2024
self.get_file_lines(path))
2025
repo = self._get_repository()
2026
base_vf = repo.texts
2027
if base_vf not in vf.fallback_versionedfiles:
2028
vf.fallback_versionedfiles.append(base_vf)
2029
return tree_revision
2031
def _stat_limbo_file(self, trans_id):
    """lstat the limbo file backing trans_id and return the stat result."""
    limbo_path = self._transform._limbo_name(trans_id)
    return os.lstat(limbo_path)
2036
def _by_parent(self):
    """Return the transform's by-parent map, computing it on first use."""
    cached = self.__by_parent
    if cached is None:
        cached = self._transform.by_parent()
        self.__by_parent = cached
    return cached
2041
def _comparison_data(self, entry, path):
2042
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2043
if kind == 'missing':
2047
file_id = self._transform.final_file_id(self._path2trans_id(path))
2048
executable = self.is_executable(path)
2049
return kind, executable, None
2051
def is_locked(self):
2054
def lock_read(self):
    """Pseudo read-lock; returns a result object whose unlock unlocks us."""
    # Perhaps in theory, this should lock the TreeTransform?
    result = lock.LogicalLockResult(self.unlock)
    return result
2062
def root_inventory(self):
2063
"""This Tree does not use inventory as its backing data."""
2064
raise NotImplementedError(_PreviewTree.root_inventory)
2066
def get_root_id(self):
    """Return the final file id of the tree root after the transform."""
    tt = self._transform
    return tt.final_file_id(tt.root)
2069
def all_file_ids(self):
2070
tree_ids = set(self._transform._tree.all_file_ids())
2071
tree_ids.difference_update(self._transform.tree_file_id(t)
2072
for t in self._transform._removed_id)
2073
tree_ids.update(viewvalues(self._transform._new_id))
2076
def all_versioned_paths(self):
2077
tree_paths = set(self._transform._tree.all_versioned_paths())
2079
tree_paths.difference_update(
2080
self._transform.trans_id_tree_path(t)
2081
for t in self._transform._removed_id)
2084
self._final_paths._determine_path(t)
2085
for t in self._transform._new_id)
2089
def _has_id(self, file_id, fallback_check):
2090
if file_id in self._transform._r_new_id:
2092
elif file_id in {self._transform.tree_file_id(trans_id) for
2093
trans_id in self._transform._removed_id}:
2096
return fallback_check(file_id)
2098
def has_id(self, file_id):
    """True if file_id is present in this preview tree."""
    fallback = self._transform._tree.has_id
    return self._has_id(file_id, fallback)
2101
def has_or_had_id(self, file_id):
    """True if file_id is or was present in this preview tree."""
    fallback = self._transform._tree.has_or_had_id
    return self._has_id(file_id, fallback)
2104
def _path2trans_id(self, path):
2105
# We must not use None here, because that is a valid value to store.
2106
trans_id = self._path2trans_id_cache.get(path, object)
2107
if trans_id is not object:
2109
segments = splitpath(path)
2110
cur_parent = self._transform.root
2111
for cur_segment in segments:
2112
for child in self._all_children(cur_parent):
2113
final_name = self._final_name_cache.get(child)
2114
if final_name is None:
2115
final_name = self._transform.final_name(child)
2116
self._final_name_cache[child] = final_name
2117
if final_name == cur_segment:
2121
self._path2trans_id_cache[path] = None
2123
self._path2trans_id_cache[path] = cur_parent
2126
def path2id(self, path):
2127
if isinstance(path, list):
2130
path = osutils.pathjoin(*path)
2131
return self._transform.final_file_id(self._path2trans_id(path))
2133
def id2path(self, file_id):
2134
trans_id = self._transform.trans_id_file_id(file_id)
2136
return self._final_paths._determine_path(trans_id)
2138
raise errors.NoSuchId(self, file_id)
2140
def _all_children(self, trans_id):
2141
children = self._all_children_cache.get(trans_id)
2142
if children is not None:
2144
children = set(self._transform.iter_tree_children(trans_id))
2145
# children in the _new_parent set are provided by _by_parent.
2146
children.difference_update(self._transform._new_parent)
2147
children.update(self._by_parent.get(trans_id, []))
2148
self._all_children_cache[trans_id] = children
2151
def _iter_children(self, file_id):
    """Yield the final file ids of the children of file_id."""
    parent_trans_id = self._transform.trans_id_file_id(file_id)
    for child_trans_id in self._all_children(parent_trans_id):
        yield self._transform.final_file_id(child_trans_id)
2157
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2158
in self._transform._tree.extras())
2159
possible_extras.update(self._transform._new_contents)
2160
possible_extras.update(self._transform._removed_id)
2161
for trans_id in possible_extras:
2162
if self._transform.final_file_id(trans_id) is None:
2163
yield self._final_paths._determine_path(trans_id)
2165
def _make_inv_entries(self, ordered_entries, specific_files=None):
2166
for trans_id, parent_file_id in ordered_entries:
2167
file_id = self._transform.final_file_id(trans_id)
2170
if (specific_files is not None
2171
and self._final_paths.get_path(trans_id) not in specific_files):
2173
kind = self._transform.final_kind(trans_id)
2175
kind = self._transform._tree.stored_kind(
2176
self._transform._tree.id2path(file_id))
2177
new_entry = inventory.make_entry(
2179
self._transform.final_name(trans_id),
2180
parent_file_id, file_id)
2181
yield new_entry, trans_id
2183
def _list_files_by_dir(self):
2184
todo = [ROOT_PARENT]
2186
while len(todo) > 0:
2188
parent_file_id = self._transform.final_file_id(parent)
2189
children = list(self._all_children(parent))
2190
paths = dict(zip(children, self._final_paths.get_paths(children)))
2191
children.sort(key=paths.get)
2192
todo.extend(reversed(children))
2193
for trans_id in children:
2194
ordered_ids.append((trans_id, parent_file_id))
2197
def iter_child_entries(self, path):
2198
trans_id = self._path2trans_id(path)
2199
if trans_id is None:
2200
raise errors.NoSuchFile(path)
2201
todo = [(child_trans_id, trans_id) for child_trans_id in
2202
self._all_children(trans_id)]
2203
for entry, trans_id in self._make_inv_entries(todo):
2206
def iter_entries_by_dir(self, specific_files=None):
2207
# This may not be a maximally efficient implementation, but it is
2208
# reasonably straightforward. An implementation that grafts the
2209
# TreeTransform changes onto the tree's iter_entries_by_dir results
2210
# might be more efficient, but requires tricky inferences about stack
2212
ordered_ids = self._list_files_by_dir()
2213
for entry, trans_id in self._make_inv_entries(ordered_ids,
2215
yield self._final_paths.get_path(trans_id), entry
2217
def _iter_entries_for_dir(self, dir_path):
2218
"""Return path, entry for items in a directory without recursing down."""
2220
dir_trans_id = self._path2trans_id(dir_path)
2221
dir_id = self._transform.final_file_id(dir_trans_id)
2222
for child_trans_id in self._all_children(dir_trans_id):
2223
ordered_ids.append((child_trans_id, dir_id))
2225
for entry, trans_id in self._make_inv_entries(ordered_ids):
2226
path_entries.append((self._final_paths.get_path(trans_id), entry))
2230
def list_files(self, include_root=False, from_dir=None, recursive=True):
2231
"""See WorkingTree.list_files."""
2232
# XXX This should behave like WorkingTree.list_files, but is really
2233
# more like RevisionTree.list_files.
2239
prefix = from_dir + '/'
2240
entries = self.iter_entries_by_dir()
2241
for path, entry in entries:
2242
if entry.name == '' and not include_root:
2245
if not path.startswith(prefix):
2247
path = path[len(prefix):]
2248
yield path, 'V', entry.kind, entry
2250
if from_dir is None and include_root is True:
2251
root_entry = inventory.make_entry(
2252
'directory', '', ROOT_PARENT, self.get_root_id())
2253
yield '', 'V', 'directory', root_entry
2254
entries = self._iter_entries_for_dir(from_dir or '')
2255
for path, entry in entries:
2256
yield path, 'V', entry.kind, entry
2258
def kind(self, path):
    """Return the post-transform kind of the entry at path.

    Raises errors.NoSuchFile when path does not resolve to a trans id.
    """
    trans_id = self._path2trans_id(path)
    if trans_id is not None:
        return self._transform.final_kind(trans_id)
    raise errors.NoSuchFile(path)
2264
def stored_kind(self, path):
2265
trans_id = self._path2trans_id(path)
2266
if trans_id is None:
2267
raise errors.NoSuchFile(path)
2269
return self._transform._new_contents[trans_id]
2271
return self._transform._tree.stored_kind(path)
2273
def get_file_mtime(self, path):
2274
"""See Tree.get_file_mtime"""
2275
file_id = self.path2id(path)
2277
raise errors.NoSuchFile(path)
2278
if not self._content_change(file_id):
2279
return self._transform._tree.get_file_mtime(
2280
self._transform._tree.id2path(file_id))
2281
trans_id = self._path2trans_id(path)
2282
return self._stat_limbo_file(trans_id).st_mtime
2284
def get_file_size(self, path):
2285
"""See Tree.get_file_size"""
2286
trans_id = self._path2trans_id(path)
2287
if trans_id is None:
2288
raise errors.NoSuchFile(path)
2289
kind = self._transform.final_kind(trans_id)
2292
if trans_id in self._transform._new_contents:
2293
return self._stat_limbo_file(trans_id).st_size
2294
if self.kind(path) == 'file':
2295
return self._transform._tree.get_file_size(path)
2299
def get_file_verifier(self, path, stat_value=None):
2300
trans_id = self._path2trans_id(path)
2301
if trans_id is None:
2302
raise errors.NoSuchFile(path)
2303
kind = self._transform._new_contents.get(trans_id)
2305
return self._transform._tree.get_file_verifier(path)
2307
with self.get_file(path) as fileobj:
2308
return ("SHA1", sha_file(fileobj))
2310
def get_file_sha1(self, path, stat_value=None):
2311
trans_id = self._path2trans_id(path)
2312
if trans_id is None:
2313
raise errors.NoSuchFile(path)
2314
kind = self._transform._new_contents.get(trans_id)
2316
return self._transform._tree.get_file_sha1(path)
2318
with self.get_file(path) as fileobj:
2319
return sha_file(fileobj)
2321
def is_executable(self, path):
2322
trans_id = self._path2trans_id(path)
2323
if trans_id is None:
2326
return self._transform._new_executability[trans_id]
2329
return self._transform._tree.is_executable(path)
2330
except OSError as e:
2331
if e.errno == errno.ENOENT:
2334
except errors.NoSuchFile:
2337
def has_filename(self, path):
2338
trans_id = self._path2trans_id(path)
2339
if trans_id in self._transform._new_contents:
2341
elif trans_id in self._transform._removed_contents:
2344
return self._transform._tree.has_filename(path)
2346
def path_content_summary(self, path):
2347
trans_id = self._path2trans_id(path)
2348
tt = self._transform
2349
tree_path = tt._tree_id_paths.get(trans_id)
2350
kind = tt._new_contents.get(trans_id)
2352
if tree_path is None or trans_id in tt._removed_contents:
2353
return 'missing', None, None, None
2354
summary = tt._tree.path_content_summary(tree_path)
2355
kind, size, executable, link_or_sha1 = summary
2358
limbo_name = tt._limbo_name(trans_id)
2359
if trans_id in tt._new_reference_revision:
2360
kind = 'tree-reference'
2362
statval = os.lstat(limbo_name)
2363
size = statval.st_size
2364
if not tt._limbo_supports_executable():
2367
executable = statval.st_mode & S_IEXEC
2371
if kind == 'symlink':
2372
link_or_sha1 = os.readlink(limbo_name)
2373
if not isinstance(link_or_sha1, text_type):
2374
link_or_sha1 = link_or_sha1.decode(osutils._fs_enc)
2375
executable = tt._new_executability.get(trans_id, executable)
2376
return kind, size, executable, link_or_sha1
2378
def iter_changes(self, from_tree, include_unchanged=False,
2379
specific_files=None, pb=None, extra_trees=None,
2380
require_versioned=True, want_unversioned=False):
2381
"""See InterTree.iter_changes.
2383
This has a fast path that is only used when the from_tree matches
2384
the transform tree, and no fancy options are supplied.
2386
if (from_tree is not self._transform._tree or include_unchanged
2387
or specific_files or want_unversioned):
2388
return tree.InterTree(from_tree, self).iter_changes(
2389
include_unchanged=include_unchanged,
2390
specific_files=specific_files,
2392
extra_trees=extra_trees,
2393
require_versioned=require_versioned,
2394
want_unversioned=want_unversioned)
2395
if want_unversioned:
2396
raise ValueError('want_unversioned is not supported')
2397
return self._transform.iter_changes()
2399
def get_file(self, path):
    """See Tree.get_file"""
    file_id = self.path2id(path)
    if self._content_change(file_id):
        # Content was altered by the transform: read it from limbo.
        trans_id = self._path2trans_id(path)
        limbo_path = self._transform._limbo_name(trans_id)
        return open(limbo_path, 'rb')
    # Unchanged content: serve it straight from the backing tree.
    return self._transform._tree.get_file(path)
2408
def get_file_with_stat(self, path):
    """Return (file object, stat); stat info is not available here."""
    fileobj = self.get_file(path)
    return fileobj, None
2411
def annotate_iter(self, path,
2412
default_revision=_mod_revision.CURRENT_REVISION):
2413
file_id = self.path2id(path)
2414
changes = self._iter_changes_cache.get(file_id)
2418
changed_content, versioned, kind = (
2419
changes.changed_content, changes.versioned, changes.kind)
2422
get_old = (kind[0] == 'file' and versioned[0])
2424
old_annotation = self._transform._tree.annotate_iter(
2425
path, default_revision=default_revision)
2429
return old_annotation
2430
if not changed_content:
2431
return old_annotation
2432
# TODO: This is doing something similar to what WT.annotate_iter is
2433
# doing, however it fails slightly because it doesn't know what
2434
# the *other* revision_id is, so it doesn't know how to give the
2435
# other as the origin for some lines, they all get
2436
# 'default_revision'
2437
# It would be nice to be able to use the new Annotator based
2438
# approach, as well.
2439
return annotate.reannotate([old_annotation],
2440
self.get_file(path).readlines(),
2443
def get_symlink_target(self, path):
    """See Tree.get_symlink_target"""
    file_id = self.path2id(path)
    if self._content_change(file_id):
        # The link was created/changed by the transform: read it from limbo.
        trans_id = self._path2trans_id(path)
        return osutils.readlink(self._transform._limbo_name(trans_id))
    return self._transform._tree.get_symlink_target(path)
2452
def walkdirs(self, prefix=''):
2453
pending = [self._transform.root]
2454
while len(pending) > 0:
2455
parent_id = pending.pop()
2458
prefix = prefix.rstrip('/')
2459
parent_path = self._final_paths.get_path(parent_id)
2460
parent_file_id = self._transform.final_file_id(parent_id)
2461
for child_id in self._all_children(parent_id):
2462
path_from_root = self._final_paths.get_path(child_id)
2463
basename = self._transform.final_name(child_id)
2464
file_id = self._transform.final_file_id(child_id)
2465
kind = self._transform.final_kind(child_id)
2466
if kind is not None:
2467
versioned_kind = kind
2470
versioned_kind = self._transform._tree.stored_kind(
2471
self._transform._tree.id2path(file_id))
2472
if versioned_kind == 'directory':
2473
subdirs.append(child_id)
2474
children.append((path_from_root, basename, kind, None,
2475
file_id, versioned_kind))
2477
if parent_path.startswith(prefix):
2478
yield (parent_path, parent_file_id), children
2479
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2482
def get_parent_ids(self):
    """Return the parent revision ids recorded on this preview tree."""
    return self._parent_ids
2485
def set_parent_ids(self, parent_ids):
    """Record parent_ids as this preview tree's parent revisions."""
    self._parent_ids = parent_ids
2488
def get_revision_tree(self, revision_id):
    """Delegate revision-tree lookup to the backing tree."""
    backing = self._transform._tree
    return backing.get_revision_tree(revision_id)
864
2492
def joinpath(parent, child):
865
2493
"""Join tree-relative paths, handling the tree root specially"""
866
2494
if parent is None or parent == "":
896
2525
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2526
return self._known_paths[trans_id]
2528
def get_paths(self, trans_ids):
    """Return a list of (path, trans_id) pairs for the given trans ids."""
    return [(self.get_path(tid), tid) for tid in trans_ids]
899
2532
def topology_sorted_ids(tree):
900
2533
"""Determine the topological order of the ids in a tree"""
901
2534
file_ids = list(tree)
902
2535
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2539
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2540
delta_from_tree=False):
2541
"""Create working tree for a branch, using a TreeTransform.
2543
This function should be used on empty trees, having a tree root at most.
2544
(see merge and revert functionality for working with existing trees)
2546
Existing files are handled like so:
2548
- Existing bzrdirs take precedence over creating new items. They are
2549
created as '%s.diverted' % name.
2550
- Otherwise, if the content on disk matches the content we are building,
2551
it is silently replaced.
2552
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2554
:param tree: The tree to convert wt into a copy of
2555
:param wt: The working tree that files will be placed into
2556
:param accelerator_tree: A tree which can be used for retrieving file
2557
contents more quickly than tree itself, i.e. a workingtree. tree
2558
will be used for cases where accelerator_tree's content is different.
2559
:param hardlink: If true, hard-link files to accelerator_tree, where
2560
possible. accelerator_tree must implement abspath, i.e. be a
2562
:param delta_from_tree: If true, build_tree may use the input Tree to
2563
generate the inventory delta.
2565
with wt.lock_tree_write(), tree.lock_read():
2566
if accelerator_tree is not None:
2567
accelerator_tree.lock_read()
2569
return _build_tree(tree, wt, accelerator_tree, hardlink,
2572
if accelerator_tree is not None:
2573
accelerator_tree.unlock()
2576
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2577
"""See build_tree."""
2578
for num, _unused in enumerate(wt.all_versioned_paths()):
2579
if num > 0: # more than just a root
2580
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2581
file_trans_id = {}
908
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2582
top_pb = ui.ui_factory.nested_progress_bar()
909
2583
pp = ProgressPhase("Build phase", 2, top_pb)
2584
if tree.get_root_id() is not None:
2585
# This is kind of a hack: we should be altering the root
2586
# as part of the regular tree shape diff logic.
2587
# The conditional test here is to avoid doing an
2588
# expensive operation (flush) every time the root id
2589
# is set within the tree, nor setting the root and thus
2590
# marking the tree as dirty, because we use two different
2591
# idioms here: tree interfaces and inventory interfaces.
2592
if wt.get_root_id() != tree.get_root_id():
2593
wt.set_root_id(tree.get_root_id())
910
2595
tt = TreeTransform(wt)
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
915
pb = bzrlib.ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2599
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_path('')
2600
with ui.ui_factory.nested_progress_bar() as pb:
2601
deferred_contents = []
2603
total = len(tree.all_versioned_paths())
2605
precomputed_delta = []
2607
precomputed_delta = None
2608
# Check if tree inventory has content. If so, we populate
2609
# existing_files with the directory content. If there are no
2610
# entries we skip populating existing_files as its not used.
2611
# This improves performance and unncessary work on large
2612
# directory trees. (#501307)
2614
existing_files = set()
2615
for dir, files in wt.walkdirs():
2616
existing_files.update(f[0] for f in files)
2617
for num, (tree_path, entry) in \
2618
enumerate(tree.iter_entries_by_dir()):
2619
pb.update(gettext("Building tree"), num
2620
- len(deferred_contents), total)
920
2621
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2624
file_id = entry.file_id
2626
precomputed_delta.append((None, tree_path, file_id, entry))
2627
if tree_path in existing_files:
2628
target_path = wt.abspath(tree_path)
2629
kind = file_kind(target_path)
2630
if kind == "directory":
2632
controldir.ControlDir.open(target_path)
2633
except errors.NotBranchError:
2637
if (file_id not in divert
2639
tree, entry, tree_path, file_id, kind,
2641
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2642
if kind == 'directory':
924
2644
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2645
if entry.kind == 'file':
2646
# We *almost* replicate new_by_entry, so that we can defer
2647
# getting the file text, and get them all at once.
2648
trans_id = tt.create_path(entry.name, parent_id)
2649
file_trans_id[file_id] = trans_id
2650
tt.version_file(file_id, trans_id)
2651
executable = tree.is_executable(tree_path)
2653
tt.set_executability(executable, trans_id)
2654
trans_data = (trans_id, file_id,
2655
tree_path, entry.text_sha1)
2656
deferred_contents.append((tree_path, trans_data))
2658
file_trans_id[file_id] = new_by_entry(
2659
tree_path, tt, entry, parent_id, tree)
2661
new_trans_id = file_trans_id[file_id]
2662
old_parent = tt.trans_id_tree_path(tree_path)
2663
_reparent_children(tt, old_parent, new_trans_id)
2664
offset = num + 1 - len(deferred_contents)
2665
_create_files(tt, tree, deferred_contents, pb, offset,
2666
accelerator_tree, hardlink)
2668
divert_trans = set(file_trans_id[f] for f in divert)
2671
return resolve_checkout(t, c, divert_trans)
2672
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2673
if len(raw_conflicts) > 0:
2674
precomputed_delta = None
2675
conflicts = cook_conflicts(raw_conflicts, tt)
2676
for conflict in conflicts:
2677
trace.warning(text_type(conflict))
2679
wt.add_conflicts(conflicts)
2680
except errors.UnsupportedOperation:
2682
result = tt.apply(no_conflicts=True,
2683
precomputed_delta=precomputed_delta)
933
2686
top_pb.finished()
935
def new_by_entry(tt, entry, parent_id, tree):
2690
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2692
total = len(desired_files) + offset
2694
if accelerator_tree is None:
2695
new_desired_files = desired_files
2697
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2699
change.path for change in iter
2700
if not (change.changed_content or change.executable[0] != change.executable[1])]
2701
if accelerator_tree.supports_content_filtering():
2702
unchanged = [(tp, ap) for (tp, ap) in unchanged
2703
if not next(accelerator_tree.iter_search_rules([ap]))]
2704
unchanged = dict(unchanged)
2705
new_desired_files = []
2707
for unused_tree_path, (trans_id, file_id, tree_path, text_sha1) in desired_files:
2708
accelerator_path = unchanged.get(tree_path)
2709
if accelerator_path is None:
2710
new_desired_files.append((tree_path,
2711
(trans_id, file_id, tree_path, text_sha1)))
2713
pb.update(gettext('Adding file contents'), count + offset, total)
2715
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2718
with accelerator_tree.get_file(accelerator_path) as f:
2719
chunks = osutils.file_iterator(f)
2720
if wt.supports_content_filtering():
2721
filters = wt._content_filter_stack(tree_path)
2722
chunks = filtered_output_bytes(chunks, filters,
2723
ContentFilterContext(tree_path, tree))
2724
tt.create_file(chunks, trans_id, sha1=text_sha1)
2727
for count, ((trans_id, file_id, tree_path, text_sha1), contents) in enumerate(
2728
tree.iter_files_bytes(new_desired_files)):
2729
if wt.supports_content_filtering():
2730
filters = wt._content_filter_stack(tree_path)
2731
contents = filtered_output_bytes(contents, filters,
2732
ContentFilterContext(tree_path, tree))
2733
tt.create_file(contents, trans_id, sha1=text_sha1)
2734
pb.update(gettext('Adding file contents'), count + offset, total)
2737
def _reparent_children(tt, old_parent, new_parent):
2738
for child in tt.iter_tree_children(old_parent):
2739
tt.adjust_path(tt.final_name(child), new_parent, child)
2742
def _reparent_transform_children(tt, old_parent, new_parent):
2743
by_parent = tt.by_parent()
2744
for child in by_parent[old_parent]:
2745
tt.adjust_path(tt.final_name(child), new_parent, child)
2746
return by_parent[old_parent]
2749
def _content_match(tree, entry, tree_path, file_id, kind, target_path):
2750
if entry.kind != kind:
2752
if entry.kind == "directory":
2754
if entry.kind == "file":
2755
with open(target_path, 'rb') as f1, \
2756
tree.get_file(tree_path) as f2:
2757
if osutils.compare_files(f1, f2):
2759
elif entry.kind == "symlink":
2760
if tree.get_symlink_target(tree_path) == os.readlink(target_path):
2765
def resolve_checkout(tt, conflicts, divert):
2766
new_conflicts = set()
2767
for c_type, conflict in ((c[0], c) for c in conflicts):
2768
# Anything but a 'duplicate' would indicate programmer error
2769
if c_type != 'duplicate':
2770
raise AssertionError(c_type)
2771
# Now figure out which is new and which is old
2772
if tt.new_contents(conflict[1]):
2773
new_file = conflict[1]
2774
old_file = conflict[2]
2776
new_file = conflict[2]
2777
old_file = conflict[1]
2779
# We should only get here if the conflict wasn't completely
2781
final_parent = tt.final_parent(old_file)
2782
if new_file in divert:
2783
new_name = tt.final_name(old_file) + '.diverted'
2784
tt.adjust_path(new_name, final_parent, new_file)
2785
new_conflicts.add((c_type, 'Diverted to',
2786
new_file, old_file))
2788
new_name = tt.final_name(old_file) + '.moved'
2789
tt.adjust_path(new_name, final_parent, old_file)
2790
new_conflicts.add((c_type, 'Moved existing file to',
2791
old_file, new_file))
2792
return new_conflicts
2795
def new_by_entry(path, tt, entry, parent_id, tree):
936
2796
"""Create a new file according to its inventory entry"""
937
2797
name = entry.name
938
2798
kind = entry.kind
939
2799
if kind == 'file':
940
contents = tree.get_file(entry.file_id).readlines()
941
executable = tree.is_executable(entry.file_id)
942
return tt.new_file(name, parent_id, contents, entry.file_id,
944
elif kind == 'directory':
945
return tt.new_directory(name, parent_id, entry.file_id)
2800
with tree.get_file(path) as f:
2801
executable = tree.is_executable(path)
2803
name, parent_id, osutils.file_iterator(f), entry.file_id,
2805
elif kind in ('directory', 'tree-reference'):
2806
trans_id = tt.new_directory(name, parent_id, entry.file_id)
2807
if kind == 'tree-reference':
2808
tt.set_tree_reference(entry.reference_revision, trans_id)
946
2810
elif kind == 'symlink':
947
target = tree.get_symlink_target(entry.file_id)
2811
target = tree.get_symlink_target(path)
948
2812
return tt.new_symlink(name, parent_id, target, entry.file_id)
950
def create_by_entry(tt, entry, tree, trans_id, lines=None, mode_id=None):
951
"""Create new file contents according to an inventory entry."""
952
if entry.kind == "file":
954
lines = tree.get_file(entry.file_id).readlines()
955
tt.create_file(lines, trans_id, mode_id=mode_id)
956
elif entry.kind == "symlink":
957
tt.create_symlink(tree.get_symlink_target(entry.file_id), trans_id)
958
elif entry.kind == "directory":
2814
raise errors.BadFileKindError(name, kind)
2817
def create_from_tree(tt, trans_id, tree, path, file_id=None, chunks=None,
2818
filter_tree_path=None):
2819
"""Create new file contents according to tree contents.
2821
:param filter_tree_path: the tree path to use to lookup
2822
content filters to apply to the bytes output in the working tree.
2823
This only applies if the working tree supports content filtering.
2825
kind = tree.kind(path)
2826
if kind == 'directory':
959
2827
tt.create_directory(trans_id)
2828
elif kind == "file":
2830
f = tree.get_file(path)
2831
chunks = osutils.file_iterator(f)
2836
if wt.supports_content_filtering() and filter_tree_path is not None:
2837
filters = wt._content_filter_stack(filter_tree_path)
2838
chunks = filtered_output_bytes(
2840
ContentFilterContext(filter_tree_path, tree))
2841
tt.create_file(chunks, trans_id)
2845
elif kind == "symlink":
2846
tt.create_symlink(tree.get_symlink_target(path), trans_id)
2848
raise AssertionError('Unknown kind %r' % kind)
961
2851
def create_entry_executability(tt, entry, trans_id):
962
2852
"""Set the executability of a trans_id according to an inventory entry"""
964
2854
tt.set_executability(entry.executable, trans_id)
967
def find_interesting(working_tree, target_tree, filenames):
968
"""Find the ids corresponding to specified filenames."""
970
interesting_ids = None
972
interesting_ids = set()
973
for tree_path in filenames:
975
for tree in (working_tree, target_tree):
976
file_id = tree.inventory.path2id(tree_path)
977
if file_id is not None:
978
interesting_ids.add(file_id)
981
raise NotVersionedError(path=tree_path)
982
return interesting_ids
985
def change_entry(tt, file_id, working_tree, target_tree,
986
trans_id_file_id, backups, trans_id, by_parent):
987
"""Replace a file_id's contents with those from a target tree."""
988
e_trans_id = trans_id_file_id(file_id)
989
entry = target_tree.inventory[file_id]
990
has_contents, contents_mod, meta_mod, = _entry_changes(file_id, entry,
996
tt.delete_contents(e_trans_id)
998
parent_trans_id = trans_id_file_id(entry.parent_id)
999
backup_name = get_backup_name(entry, by_parent,
1000
parent_trans_id, tt)
1001
tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
1002
tt.unversion_file(e_trans_id)
1003
e_trans_id = tt.create_path(entry.name, parent_trans_id)
1004
tt.version_file(file_id, e_trans_id)
1005
trans_id[file_id] = e_trans_id
1006
create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
1007
create_entry_executability(tt, entry, e_trans_id)
1010
tt.set_executability(entry.executable, e_trans_id)
1011
if tt.final_name(e_trans_id) != entry.name:
1014
parent_id = tt.final_parent(e_trans_id)
1015
parent_file_id = tt.final_file_id(parent_id)
1016
if parent_file_id != entry.parent_id:
1021
parent_trans_id = trans_id_file_id(entry.parent_id)
1022
tt.adjust_path(entry.name, parent_trans_id, e_trans_id)
1025
def get_backup_name(entry, by_parent, parent_trans_id, tt):
1026
"""Produce a backup-style name that appears to be available"""
1030
yield "%s.~%d~" % (entry.name, counter)
1032
for name in name_gen():
1033
if not tt.has_named_child(by_parent, parent_trans_id, name):
1036
def _entry_changes(file_id, entry, working_tree):
1037
"""Determine in which ways the inventory entry has changed.
1039
Returns booleans: has_contents, content_mod, meta_mod
1040
has_contents means there are currently contents, but they differ
1041
contents_mod means contents need to be modified
1042
meta_mod means the metadata needs to be modified
1044
cur_entry = working_tree.inventory[file_id]
1046
working_kind = working_tree.kind(file_id)
1049
if e.errno != errno.ENOENT:
1051
has_contents = False
1054
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
1059
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
cur_entry._forget_tree_state()
1065
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2857
def revert(working_tree, target_tree, filenames, backups=False,
2858
pb=None, change_reporter=None):
1070
2859
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
1075
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2860
pb = ui.ui_factory.nested_progress_bar()
2862
with target_tree.lock_read(), TreeTransform(working_tree, pb) as tt:
2863
pp = ProgressPhase("Revert phase", 3, pb)
2864
conflicts, merge_modified = _prepare_revert_transform(
2865
working_tree, target_tree, tt, filenames, backups, pp)
2867
change_reporter = delta._ChangeReporter(
2868
unversioned_filter=working_tree.is_ignored)
2869
delta.report_changes(tt.iter_changes(), change_reporter)
2870
for conflict in conflicts:
2871
trace.warning(text_type(conflict))
2874
if working_tree.supports_merge_modified():
2875
working_tree.set_merge_modified(merge_modified)
2881
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2882
backups, pp, basis_tree=None,
2883
merge_modified=None):
2884
with ui.ui_factory.nested_progress_bar() as child_pb:
2885
if merge_modified is None:
2886
merge_modified = working_tree.merge_modified()
2887
merge_modified = _alter_files(working_tree, target_tree, tt,
2888
child_pb, filenames, backups,
2889
merge_modified, basis_tree)
2890
with ui.ui_factory.nested_progress_bar() as child_pb:
2891
raw_conflicts = resolve_conflicts(
2892
tt, child_pb, lambda t, c: conflict_pass(t, c, target_tree))
2893
conflicts = cook_conflicts(raw_conflicts, tt)
2894
return conflicts, merge_modified
2897
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2898
backups, merge_modified, basis_tree=None):
2899
if basis_tree is not None:
2900
basis_tree.lock_read()
2901
# We ask the working_tree for its changes relative to the target, rather
2902
# than the target changes relative to the working tree. Because WT4 has an
2903
# optimizer to compare itself to a target, but no optimizer for the
2905
change_list = working_tree.iter_changes(
2906
target_tree, specific_files=specific_files, pb=pb)
2907
if not target_tree.is_versioned(u''):
2913
for id_num, change in enumerate(change_list):
2914
file_id = change.file_id
2915
target_path, wt_path = change.path
2916
target_versioned, wt_versioned = change.versioned
2917
target_parent, wt_parent = change.parent_id
2918
target_name, wt_name = change.name
2919
target_kind, wt_kind = change.kind
2920
target_executable, wt_executable = change.executable
2921
if skip_root and wt_parent is None:
2923
trans_id = tt.trans_id_file_id(file_id)
2925
if change.changed_content:
2926
keep_content = False
2927
if wt_kind == 'file' and (backups or target_kind is None):
2928
wt_sha1 = working_tree.get_file_sha1(wt_path)
2929
if merge_modified.get(file_id) != wt_sha1:
2930
# acquire the basis tree lazily to prevent the
2931
# expense of accessing it when it's not needed ?
2932
# (Guessing, RBC, 200702)
2933
if basis_tree is None:
2934
basis_tree = working_tree.basis_tree()
2935
basis_tree.lock_read()
2936
basis_path = find_previous_path(
2937
working_tree, basis_tree, wt_path)
2938
if basis_path is None:
2939
if target_kind is None and not target_versioned:
2942
if wt_sha1 != basis_tree.get_file_sha1(basis_path):
2944
if wt_kind is not None:
2945
if not keep_content:
1121
2946
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2947
elif target_kind is not None:
2948
parent_trans_id = tt.trans_id_file_id(wt_parent)
2949
backup_name = tt._available_backup_name(
2950
wt_name, parent_trans_id)
2951
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2952
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2953
if wt_versioned and target_versioned:
2954
tt.unversion_file(trans_id)
2955
tt.version_file(file_id, new_trans_id)
2956
# New contents should have the same unix perms as old
2959
trans_id = new_trans_id
2960
if target_kind in ('directory', 'tree-reference'):
2961
tt.create_directory(trans_id)
2962
if target_kind == 'tree-reference':
2963
revision = target_tree.get_reference_revision(
2965
tt.set_tree_reference(revision, trans_id)
2966
elif target_kind == 'symlink':
2967
tt.create_symlink(target_tree.get_symlink_target(
2968
target_path), trans_id)
2969
elif target_kind == 'file':
2970
deferred_files.append(
2971
(target_path, (trans_id, mode_id, file_id)))
2972
if basis_tree is None:
2973
basis_tree = working_tree.basis_tree()
2974
basis_tree.lock_read()
2975
new_sha1 = target_tree.get_file_sha1(target_path)
2976
basis_path = find_previous_path(target_tree, basis_tree, target_path)
2977
if (basis_path is not None and
2978
new_sha1 == basis_tree.get_file_sha1(basis_path)):
2979
if file_id in merge_modified:
2980
del merge_modified[file_id]
2982
merge_modified[file_id] = new_sha1
2984
# preserve the execute bit when backing up
2985
if keep_content and wt_executable == target_executable:
2986
tt.set_executability(target_executable, trans_id)
2987
elif target_kind is not None:
2988
raise AssertionError(target_kind)
2989
if not wt_versioned and target_versioned:
2990
tt.version_file(file_id, trans_id)
2991
if wt_versioned and not target_versioned:
2992
tt.unversion_file(trans_id)
2993
if (target_name is not None
2994
and (wt_name != target_name or wt_parent != target_parent)):
2995
if target_name == '' and target_parent is None:
2996
parent_trans = ROOT_PARENT
2998
parent_trans = tt.trans_id_file_id(target_parent)
2999
if wt_parent is None and wt_versioned:
3000
tt.adjust_root_path(target_name, parent_trans)
3002
tt.adjust_path(target_name, parent_trans, trans_id)
3003
if wt_executable != target_executable and target_kind == "file":
3004
tt.set_executability(target_executable, trans_id)
3005
if working_tree.supports_content_filtering():
3006
for (trans_id, mode_id, file_id), bytes in (
3007
target_tree.iter_files_bytes(deferred_files)):
3008
# We're reverting a tree to the target tree so using the
3009
# target tree to find the file path seems the best choice
3010
# here IMO - Ian C 27/Oct/2009
3011
filter_tree_path = target_tree.id2path(file_id)
3012
filters = working_tree._content_filter_stack(filter_tree_path)
3013
bytes = filtered_output_bytes(
3015
ContentFilterContext(filter_tree_path, working_tree))
3016
tt.create_file(bytes, trans_id, mode_id)
3018
for (trans_id, mode_id, file_id), bytes in target_tree.iter_files_bytes(
3020
tt.create_file(bytes, trans_id, mode_id)
3021
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
3023
if basis_tree is not None:
3025
return merge_modified
3028
def resolve_conflicts(tt, pb=None, pass_func=None):
1144
3029
"""Make many conflict-resolution attempts, but die if they fail"""
3030
if pass_func is None:
3031
pass_func = conflict_pass
1145
3032
new_conflicts = set()
3033
with ui.ui_factory.nested_progress_bar() as pb:
1147
3034
for n in range(10):
1148
pb.update('Resolution pass', n+1, 10)
3035
pb.update(gettext('Resolution pass'), n + 1, 10)
1149
3036
conflicts = tt.find_conflicts()
1150
3037
if len(conflicts) == 0:
1151
3038
return new_conflicts
1152
new_conflicts.update(conflict_pass(tt, conflicts))
3039
new_conflicts.update(pass_func(tt, conflicts))
1153
3040
raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
3043
def conflict_pass(tt, conflicts, path_tree=None):
3044
"""Resolve some classes of conflicts.
3046
:param tt: The transform to resolve conflicts in
3047
:param conflicts: The conflicts to resolve
3048
:param path_tree: A Tree to get supplemental paths from
1160
3050
new_conflicts = set()
1161
3051
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
3052
if c_type == 'duplicate id':