861
828
self.create_symlink(target, trans_id)
831
def new_orphan(self, trans_id, parent_id):
832
"""Schedule an item to be orphaned.
834
When a directory is about to be removed, its children, if they are not
835
versioned are moved out of the way: they don't have a parent anymore.
837
:param trans_id: The trans_id of the existing item.
838
:param parent_id: The parent trans_id of the item.
840
raise NotImplementedError(self.new_orphan)
842
def _get_potential_orphans(self, dir_id):
843
"""Find the potential orphans in a directory.
845
A directory can't be safely deleted if there are versioned files in it.
846
If all the contained files are unversioned then they can be orphaned.
848
The 'None' return value means that the directory contains at least one
849
versioned file and should not be deleted.
851
:param dir_id: The directory trans id.
853
:return: A list of the orphan trans ids or None if at least one
854
versioned file is present.
857
# Find the potential orphans, stop if one item should be kept
858
for child_tid in self.by_parent()[dir_id]:
859
if child_tid in self._removed_contents:
860
# The child is removed as part of the transform. Since it was
861
# versioned before, it's not an orphan
863
elif self.final_file_id(child_tid) is None:
864
# The child is not versioned
865
orphans.append(child_tid)
867
# We have a versioned file here, searching for orphans is
873
def _affected_ids(self):
874
"""Return the set of transform ids affected by the transform"""
875
trans_ids = set(self._removed_id)
876
trans_ids.update(self._new_id.keys())
877
trans_ids.update(self._removed_contents)
878
trans_ids.update(self._new_contents.keys())
879
trans_ids.update(self._new_executability.keys())
880
trans_ids.update(self._new_name.keys())
881
trans_ids.update(self._new_parent.keys())
884
def _get_file_id_maps(self):
885
"""Return mapping of file_ids to trans_ids in the to and from states"""
886
trans_ids = self._affected_ids()
889
# Build up two dicts: trans_ids associated with file ids in the
890
# FROM state, vs the TO state.
891
for trans_id in trans_ids:
892
from_file_id = self.tree_file_id(trans_id)
893
if from_file_id is not None:
894
from_trans_ids[from_file_id] = trans_id
895
to_file_id = self.final_file_id(trans_id)
896
if to_file_id is not None:
897
to_trans_ids[to_file_id] = trans_id
898
return from_trans_ids, to_trans_ids
900
def _from_file_data(self, from_trans_id, from_versioned, file_id):
901
"""Get data about a file in the from (tree) state
903
Return a (name, parent, kind, executable) tuple
905
from_path = self._tree_id_paths.get(from_trans_id)
907
# get data from working tree if versioned
908
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
909
from_name = from_entry.name
910
from_parent = from_entry.parent_id
913
if from_path is None:
914
# File does not exist in FROM state
918
# File exists, but is not versioned. Have to use path-
920
from_name = os.path.basename(from_path)
921
tree_parent = self.get_tree_parent(from_trans_id)
922
from_parent = self.tree_file_id(tree_parent)
923
if from_path is not None:
924
from_kind, from_executable, from_stats = \
925
self._tree._comparison_data(from_entry, from_path)
928
from_executable = False
929
return from_name, from_parent, from_kind, from_executable
931
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
932
"""Get data about a file in the to (target) state
934
Return a (name, parent, kind, executable) tuple
936
to_name = self.final_name(to_trans_id)
937
to_kind = self.final_kind(to_trans_id)
938
to_parent = self.final_file_id(self.final_parent(to_trans_id))
939
if to_trans_id in self._new_executability:
940
to_executable = self._new_executability[to_trans_id]
941
elif to_trans_id == from_trans_id:
942
to_executable = from_executable
944
to_executable = False
945
return to_name, to_parent, to_kind, to_executable
947
def iter_changes(self):
948
"""Produce output in the same format as Tree.iter_changes.
950
Will produce nonsensical results if invoked while inventory/filesystem
951
conflicts (as reported by TreeTransform.find_conflicts()) are present.
953
This reads the Transform, but only reproduces changes involving a
954
file_id. Files that are not versioned in either of the FROM or TO
955
states are not reflected.
957
final_paths = FinalPaths(self)
958
from_trans_ids, to_trans_ids = self._get_file_id_maps()
960
# Now iterate through all active file_ids
961
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
963
from_trans_id = from_trans_ids.get(file_id)
964
# find file ids, and determine versioning state
965
if from_trans_id is None:
966
from_versioned = False
967
from_trans_id = to_trans_ids[file_id]
969
from_versioned = True
970
to_trans_id = to_trans_ids.get(file_id)
971
if to_trans_id is None:
973
to_trans_id = from_trans_id
977
from_name, from_parent, from_kind, from_executable = \
978
self._from_file_data(from_trans_id, from_versioned, file_id)
980
to_name, to_parent, to_kind, to_executable = \
981
self._to_file_data(to_trans_id, from_trans_id, from_executable)
983
if not from_versioned:
986
from_path = self._tree_id_paths.get(from_trans_id)
990
to_path = final_paths.get_path(to_trans_id)
991
if from_kind != to_kind:
993
elif to_kind in ('file', 'symlink') and (
994
to_trans_id != from_trans_id or
995
to_trans_id in self._new_contents):
997
if (not modified and from_versioned == to_versioned and
998
from_parent==to_parent and from_name == to_name and
999
from_executable == to_executable):
1001
results.append((file_id, (from_path, to_path), modified,
1002
(from_versioned, to_versioned),
1003
(from_parent, to_parent),
1004
(from_name, to_name),
1005
(from_kind, to_kind),
1006
(from_executable, to_executable)))
1007
return iter(sorted(results, key=lambda x:x[1]))
1009
def get_preview_tree(self):
1010
"""Return a tree representing the result of the transform.
1012
The tree is a snapshot, and altering the TreeTransform will invalidate
1015
return _PreviewTree(self)
1017
def commit(self, branch, message, merge_parents=None, strict=False,
1018
timestamp=None, timezone=None, committer=None, authors=None,
1019
revprops=None, revision_id=None):
1020
"""Commit the result of this TreeTransform to a branch.
1022
:param branch: The branch to commit to.
1023
:param message: The message to attach to the commit.
1024
:param merge_parents: Additional parent revision-ids specified by
1026
:param strict: If True, abort the commit if there are unversioned
1028
:param timestamp: if not None, seconds-since-epoch for the time and
1029
date. (May be a float.)
1030
:param timezone: Optional timezone for timestamp, as an offset in
1032
:param committer: Optional committer in email-id format.
1033
(e.g. "J Random Hacker <jrandom@example.com>")
1034
:param authors: Optional list of authors in email-id format.
1035
:param revprops: Optional dictionary of revision properties.
1036
:param revision_id: Optional revision id. (Specifying a revision-id
1037
may reduce performance for some non-native formats.)
1038
:return: The revision_id of the revision committed.
1040
self._check_malformed()
1042
unversioned = set(self._new_contents).difference(set(self._new_id))
1043
for trans_id in unversioned:
1044
if self.final_file_id(trans_id) is None:
1045
raise errors.StrictCommitFailed()
1047
revno, last_rev_id = branch.last_revision_info()
1048
if last_rev_id == _mod_revision.NULL_REVISION:
1049
if merge_parents is not None:
1050
raise ValueError('Cannot supply merge parents for first'
1054
parent_ids = [last_rev_id]
1055
if merge_parents is not None:
1056
parent_ids.extend(merge_parents)
1057
if self._tree.get_revision_id() != last_rev_id:
1058
raise ValueError('TreeTransform not based on branch basis: %s' %
1059
self._tree.get_revision_id())
1060
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1061
builder = branch.get_commit_builder(parent_ids,
1062
timestamp=timestamp,
1064
committer=committer,
1066
revision_id=revision_id)
1067
preview = self.get_preview_tree()
1068
list(builder.record_iter_changes(preview, last_rev_id,
1069
self.iter_changes()))
1070
builder.finish_inventory()
1071
revision_id = builder.commit(message)
1072
branch.set_last_revision_info(revno + 1, revision_id)
1075
def _text_parent(self, trans_id):
1076
file_id = self.tree_file_id(trans_id)
1078
if file_id is None or self._tree.kind(file_id) != 'file':
1080
except errors.NoSuchFile:
1084
def _get_parents_texts(self, trans_id):
1085
"""Get texts for compression parents of this file."""
1086
file_id = self._text_parent(trans_id)
1089
return (self._tree.get_file_text(file_id),)
1091
def _get_parents_lines(self, trans_id):
1092
"""Get lines for compression parents of this file."""
1093
file_id = self._text_parent(trans_id)
1096
return (self._tree.get_file_lines(file_id),)
1098
def serialize(self, serializer):
1099
"""Serialize this TreeTransform.
1101
:param serializer: A Serialiser like pack.ContainerSerializer.
1103
new_name = dict((k, v.encode('utf-8')) for k, v in
1104
self._new_name.items())
1105
new_executability = dict((k, int(v)) for k, v in
1106
self._new_executability.items())
1107
tree_path_ids = dict((k.encode('utf-8'), v)
1108
for k, v in self._tree_path_ids.items())
1110
'_id_number': self._id_number,
1111
'_new_name': new_name,
1112
'_new_parent': self._new_parent,
1113
'_new_executability': new_executability,
1114
'_new_id': self._new_id,
1115
'_tree_path_ids': tree_path_ids,
1116
'_removed_id': list(self._removed_id),
1117
'_removed_contents': list(self._removed_contents),
1118
'_non_present_ids': self._non_present_ids,
1120
yield serializer.bytes_record(bencode.bencode(attribs),
1122
for trans_id, kind in self._new_contents.items():
1124
lines = osutils.chunks_to_lines(
1125
self._read_file_chunks(trans_id))
1126
parents = self._get_parents_lines(trans_id)
1127
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1128
content = ''.join(mpdiff.to_patch())
1129
if kind == 'directory':
1131
if kind == 'symlink':
1132
content = self._read_symlink_target(trans_id)
1133
yield serializer.bytes_record(content, ((trans_id, kind),))
1135
def deserialize(self, records):
1136
"""Deserialize a stored TreeTransform.
1138
:param records: An iterable of (names, content) tuples, as per
1139
pack.ContainerPushParser.
1141
names, content = records.next()
1142
attribs = bencode.bdecode(content)
1143
self._id_number = attribs['_id_number']
1144
self._new_name = dict((k, v.decode('utf-8'))
1145
for k, v in attribs['_new_name'].items())
1146
self._new_parent = attribs['_new_parent']
1147
self._new_executability = dict((k, bool(v)) for k, v in
1148
attribs['_new_executability'].items())
1149
self._new_id = attribs['_new_id']
1150
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1151
self._tree_path_ids = {}
1152
self._tree_id_paths = {}
1153
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1154
path = bytepath.decode('utf-8')
1155
self._tree_path_ids[path] = trans_id
1156
self._tree_id_paths[trans_id] = path
1157
self._removed_id = set(attribs['_removed_id'])
1158
self._removed_contents = set(attribs['_removed_contents'])
1159
self._non_present_ids = attribs['_non_present_ids']
1160
for ((trans_id, kind),), content in records:
1162
mpdiff = multiparent.MultiParent.from_patch(content)
1163
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1164
self.create_file(lines, trans_id)
1165
if kind == 'directory':
1166
self.create_directory(trans_id)
1167
if kind == 'symlink':
1168
self.create_symlink(content.decode('utf-8'), trans_id)
1171
class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
        self._limbodir = limbodir
        self._deletiondir = None
        # A mapping of transform ids to their limbo filename
        self._limbo_files = {}
        self._possibly_stale_limbo_files = set()
        # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
        self._limbo_children = {}
        # Map transform ids to maps of child filename to child transform id
        self._limbo_children_names = {}
        # List of transform ids that need to be renamed from limbo into place
        self._needs_rename = set()
        self._creation_mtime = None
1201
"""Release the working tree lock, if held, clean up limbo dir.
1203
This is required if apply has not been invoked, but can be invoked
1206
if self._tree is None:
1209
limbo_paths = self._limbo_files.values() + list(
1210
self._possibly_stale_limbo_files)
1211
limbo_paths = sorted(limbo_paths, reverse=True)
1212
for path in limbo_paths:
1216
if e.errno != errno.ENOENT:
1218
# XXX: warn? perhaps we just got interrupted at an
1219
# inconvenient moment, but perhaps files are disappearing
1222
delete_any(self._limbodir)
1224
# We don't especially care *why* the dir is immortal.
1225
raise ImmortalLimbo(self._limbodir)
1227
if self._deletiondir is not None:
1228
delete_any(self._deletiondir)
1230
raise errors.ImmortalPendingDeletion(self._deletiondir)
1232
TreeTransformBase.finalize(self)
1234
def _limbo_name(self, trans_id):
1235
"""Generate the limbo name of a file"""
1236
limbo_name = self._limbo_files.get(trans_id)
1237
if limbo_name is None:
1238
limbo_name = self._generate_limbo_path(trans_id)
1239
self._limbo_files[trans_id] = limbo_name
1242
def _generate_limbo_path(self, trans_id):
1243
"""Generate a limbo path using the trans_id as the relative path.
1245
This is suitable as a fallback, and when the transform should not be
1246
sensitive to the path encoding of the limbo directory.
1248
self._needs_rename.add(trans_id)
1249
return pathjoin(self._limbodir, trans_id)
1251
def adjust_path(self, name, parent, trans_id):
1252
previous_parent = self._new_parent.get(trans_id)
1253
previous_name = self._new_name.get(trans_id)
1254
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1255
if (trans_id in self._limbo_files and
1256
trans_id not in self._needs_rename):
1257
self._rename_in_limbo([trans_id])
1258
if previous_parent != parent:
1259
self._limbo_children[previous_parent].remove(trans_id)
1260
if previous_parent != parent or previous_name != name:
1261
del self._limbo_children_names[previous_parent][previous_name]
1263
def _rename_in_limbo(self, trans_ids):
1264
"""Fix limbo names so that the right final path is produced.
1266
This means we outsmarted ourselves-- we tried to avoid renaming
1267
these files later by creating them with their final names in their
1268
final parents. But now the previous name or parent is no longer
1269
suitable, so we have to rename them.
1271
Even for trans_ids that have no new contents, we must remove their
1272
entries from _limbo_files, because they are now stale.
1274
for trans_id in trans_ids:
1275
old_path = self._limbo_files[trans_id]
1276
self._possibly_stale_limbo_files.add(old_path)
1277
del self._limbo_files[trans_id]
1278
if trans_id not in self._new_contents:
1280
new_path = self._limbo_name(trans_id)
1281
os.rename(old_path, new_path)
1282
self._possibly_stale_limbo_files.remove(old_path)
1283
for descendant in self._limbo_descendants(trans_id):
1284
desc_path = self._limbo_files[descendant]
1285
desc_path = new_path + desc_path[len(old_path):]
1286
self._limbo_files[descendant] = desc_path
1288
def _limbo_descendants(self, trans_id):
1289
"""Return the set of trans_ids whose limbo paths descend from this."""
1290
descendants = set(self._limbo_children.get(trans_id, []))
1291
for descendant in list(descendants):
1292
descendants.update(self._limbo_descendants(descendant))
1295
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1296
"""Schedule creation of a new file.
1300
:param contents: an iterator of strings, all of which will be written
1301
to the target destination.
1302
:param trans_id: TreeTransform handle
1303
:param mode_id: If not None, force the mode of the target file to match
1304
the mode of the object referenced by mode_id.
1305
Otherwise, we will try to preserve mode bits of an existing file.
1306
:param sha1: If the sha1 of this content is already known, pass it in.
1307
We can use it to prevent future sha1 computations.
1309
name = self._limbo_name(trans_id)
1310
f = open(name, 'wb')
1312
unique_add(self._new_contents, trans_id, 'file')
1313
f.writelines(contents)
1316
self._set_mtime(name)
1317
self._set_mode(trans_id, mode_id, S_ISREG)
1318
# It is unfortunate we have to use lstat instead of fstat, but we just
1319
# used utime and chmod on the file, so we need the accurate final
1321
if sha1 is not None:
1322
self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1324
def _read_file_chunks(self, trans_id):
1325
cur_file = open(self._limbo_name(trans_id), 'rb')
1327
return cur_file.readlines()
1331
def _read_symlink_target(self, trans_id):
1332
return os.readlink(self._limbo_name(trans_id))
1334
def _set_mtime(self, path):
1335
"""All files that are created get the same mtime.
1337
This time is set by the first object to be created.
1339
if self._creation_mtime is None:
1340
self._creation_mtime = time.time()
1341
os.utime(path, (self._creation_mtime, self._creation_mtime))
1343
def create_hardlink(self, path, trans_id):
1344
"""Schedule creation of a hard link"""
1345
name = self._limbo_name(trans_id)
1349
if e.errno != errno.EPERM:
1351
raise errors.HardLinkNotSupported(path)
1353
unique_add(self._new_contents, trans_id, 'file')
1355
# Clean up the file, it never got registered so
1356
# TreeTransform.finalize() won't clean it up.
1360
def create_directory(self, trans_id):
1361
"""Schedule creation of a new directory.
1363
See also new_directory.
1365
os.mkdir(self._limbo_name(trans_id))
1366
unique_add(self._new_contents, trans_id, 'directory')
1368
def create_symlink(self, target, trans_id):
1369
"""Schedule creation of a new symbolic link.
1371
target is a bytestring.
1372
See also new_symlink.
1375
os.symlink(target, self._limbo_name(trans_id))
1376
unique_add(self._new_contents, trans_id, 'symlink')
1379
path = FinalPaths(self).get_path(trans_id)
1382
raise UnableCreateSymlink(path=path)
1384
def cancel_creation(self, trans_id):
1385
"""Cancel the creation of new file contents."""
1386
del self._new_contents[trans_id]
1387
if trans_id in self._observed_sha1s:
1388
del self._observed_sha1s[trans_id]
1389
children = self._limbo_children.get(trans_id)
1390
# if this is a limbo directory with children, move them before removing
1392
if children is not None:
1393
self._rename_in_limbo(children)
1394
del self._limbo_children[trans_id]
1395
del self._limbo_children_names[trans_id]
1396
delete_any(self._limbo_name(trans_id))
1398
def new_orphan(self, trans_id, parent_id):
1399
# FIXME: There is no tree config, so we use the branch one (it's weird
1400
# to define it this way as orphaning can only occur in a working tree,
1401
# but that's all we have (for now). It will find the option in
1402
# locations.conf or bazaar.conf though) -- vila 20100916
1403
conf = self._tree.branch.get_config()
1404
conf_var_name = 'bzr.transform.orphan_policy'
1405
orphan_policy = conf.get_user_option(conf_var_name)
1406
default_policy = orphaning_registry.default_key
1407
if orphan_policy is None:
1408
orphan_policy = default_policy
1409
if orphan_policy not in orphaning_registry:
1410
trace.warning('%s (from %s) is not a known policy, defaulting '
1411
'to %s' % (orphan_policy, conf_var_name, default_policy))
1412
orphan_policy = default_policy
1413
handle_orphan = orphaning_registry.get(orphan_policy)
1414
handle_orphan(self, trans_id, parent_id)
1417
class OrphaningError(errors.BzrError):

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
# Registry of the available orphaning policies; 'conflict' is the default.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
class TreeTransform(DiskTreeTransform):
    """Represent a tree transformation.

    This object is designed to support incremental generation of the transform,
    in any order.

    However, it gives optimum performance when parent directories are created
    before their contents.  The transform is then able to put child files
    directly in their parent directory, avoiding later renames.

    It is easy to produce malformed transforms, but they are generally
    harmless.  Attempting to apply a malformed transform will cause an
    exception to be raised before any modifications are made to the tree.

    Many kinds of malformed transforms can be corrected with the
    resolve_conflicts function.  The remaining ones indicate programming error,
    such as trying to create a file with no path.

    Two sets of file creation methods are supplied.  Convenience methods are:
     * new_file
     * new_directory
     * new_symlink

    These are composed of the low-level methods:
     * create_path
     * create_file or create_directory or create_symlink
     * version_file
     * set_executability

    Transform/Transaction ids
    -------------------------
    trans_ids are temporary ids assigned to all files involved in a transform.
    It's possible, even common, that not all files in the Tree have trans_ids.

    trans_ids are used because filenames and file_ids are not good enough
    identifiers; filenames change, and not all files have file_ids.  File-ids
    are also associated with trans-ids, so that moving a file moves its
    trans-id.

    trans_ids are only valid for the TreeTransform that generated them.

    Limbo
    -----
    Limbo is a temporary directory use to hold new versions of files.
    Files are added to limbo by create_file, create_directory, create_symlink,
    and their convenience variants (new_*).  Files may be removed from limbo
    using cancel_creation.  Files are renamed from limbo into their final
    location as part of TreeTransform.apply

    Limbo must be cleaned up, by either calling TreeTransform.apply or
    calling TreeTransform.finalize.

    Files are placed into limbo inside their parent directories, where
    possible.  This reduces subsequent renames, and makes operations involving
    lots of files faster.  This optimization is only possible if the parent
    directory is created *before* creating any of its children, so avoid
    creating children before parents, where possible.

    Pending-deletion
    ----------------
    This temporary directory is used by _FileMover for storing files that are
    about to be deleted.  In case of rollback, the files will be restored.
    FileMover does not delete files until it is sure that a rollback will not
    happen.
    """

    def __init__(self, tree, pb=None):
        """Note: a tree_write lock is taken on the tree.

        Use TreeTransform.finalize() to release the lock (can be omitted if
        TreeTransform.apply() called).
        """
        tree.lock_tree_write()

        try:
            limbodir = urlutils.local_path_from_url(
                tree._transport.abspath('limbo'))
            osutils.ensure_empty_directory_exists(
                limbodir,
                errors.ExistingLimbo)
            deletiondir = urlutils.local_path_from_url(
                tree._transport.abspath('pending-deletion'))
            osutils.ensure_empty_directory_exists(
                deletiondir,
                errors.ExistingPendingDeletion)
        except:
            # Don't leave the tree locked if setup failed.
            tree.unlock()
            raise

        # Cache of realpath results, to speed up canonical_path
        self._realpaths = {}
        # Cache of relpath results, to speed up canonical_path
        self._relpaths = {}
        DiskTreeTransform.__init__(self, tree, limbodir, pb,
                                   tree.case_sensitive)
        self._deletiondir = deletiondir
def canonical_path(self, path):
1579
"""Get the canonical tree-relative path"""
1580
# don't follow final symlinks
1581
abs = self._tree.abspath(path)
1582
if abs in self._relpaths:
1583
return self._relpaths[abs]
1584
dirname, basename = os.path.split(abs)
1585
if dirname not in self._realpaths:
1586
self._realpaths[dirname] = os.path.realpath(dirname)
1587
dirname = self._realpaths[dirname]
1588
abs = pathjoin(dirname, basename)
1589
if dirname in self._relpaths:
1590
relpath = pathjoin(self._relpaths[dirname], basename)
1591
relpath = relpath.rstrip('/\\')
1593
relpath = self._tree.relpath(abs)
1594
self._relpaths[abs] = relpath
1597
def tree_kind(self, trans_id):
1598
"""Determine the file kind in the working tree.
1600
:returns: The file kind or None if the file does not exist
1602
path = self._tree_id_paths.get(trans_id)
1606
return file_kind(self._tree.abspath(path))
1607
except errors.NoSuchFile:
1610
def _set_mode(self, trans_id, mode_id, typefunc):
1611
"""Set the mode of new file contents.
1612
The mode_id is the existing file to get the mode from (often the same
1613
as trans_id). The operation is only performed if there's a mode match
1614
according to typefunc.
1619
old_path = self._tree_id_paths[mode_id]
1623
mode = os.stat(self._tree.abspath(old_path)).st_mode
1625
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1626
# Either old_path doesn't exist, or the parent of the
1627
# target is not a directory (but will be one eventually)
1628
# Either way, we know it doesn't exist *right now*
1629
# See also bug #248448
1634
osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1636
def iter_tree_children(self, parent_id):
1637
"""Iterate through the entry's tree children, if any"""
1639
path = self._tree_id_paths[parent_id]
1643
children = os.listdir(self._tree.abspath(path))
1645
if not (osutils._is_error_enotdir(e)
1646
or e.errno in (errno.ENOENT, errno.ESRCH)):
1650
for child in children:
1651
childpath = joinpath(path, child)
1652
if self._tree.is_control_filename(childpath):
1654
yield self.trans_id_tree_path(childpath)
1656
def _generate_limbo_path(self, trans_id):
1657
"""Generate a limbo path using the final path if possible.
1659
This optimizes the performance of applying the tree transform by
1660
avoiding renames. These renames can be avoided only when the parent
1661
directory is already scheduled for creation.
1663
If the final path cannot be used, falls back to using the trans_id as
1666
parent = self._new_parent.get(trans_id)
1667
# if the parent directory is already in limbo (e.g. when building a
1668
# tree), choose a limbo name inside the parent, to reduce further
1670
use_direct_path = False
1671
if self._new_contents.get(parent) == 'directory':
1672
filename = self._new_name.get(trans_id)
1673
if filename is not None:
1674
if parent not in self._limbo_children:
1675
self._limbo_children[parent] = set()
1676
self._limbo_children_names[parent] = {}
1677
use_direct_path = True
1678
# the direct path can only be used if no other file has
1679
# already taken this pathname, i.e. if the name is unused, or
1680
# if it is already associated with this trans_id.
1681
elif self._case_sensitive_target:
1682
if (self._limbo_children_names[parent].get(filename)
1683
in (trans_id, None)):
1684
use_direct_path = True
1686
for l_filename, l_trans_id in\
1687
self._limbo_children_names[parent].iteritems():
1688
if l_trans_id == trans_id:
1690
if l_filename.lower() == filename.lower():
1693
use_direct_path = True
1695
if not use_direct_path:
1696
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1698
limbo_name = pathjoin(self._limbo_files[parent], filename)
1699
self._limbo_children[parent].add(trans_id)
1700
self._limbo_children_names[parent][filename] = trans_id
1704
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1705
"""Apply all changes to the inventory and filesystem.
1707
If filesystem or inventory conflicts are present, MalformedTransform
1710
If apply succeeds, finalize is not necessary.
1712
:param no_conflicts: if True, the caller guarantees there are no
1713
conflicts, so no check is made.
1714
:param precomputed_delta: An inventory delta to use instead of
1716
:param _mover: Supply an alternate FileMover, for testing
1718
if not no_conflicts:
1719
self._check_malformed()
1720
child_pb = ui.ui_factory.nested_progress_bar()
1722
if precomputed_delta is None:
1723
child_pb.update(gettext('Apply phase'), 0, 2)
1724
inventory_delta = self._generate_inventory_delta()
1727
inventory_delta = precomputed_delta
1730
mover = _FileMover()
1734
child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1735
self._apply_removals(mover)
1736
child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1737
modified_paths = self._apply_insertions(mover)
1742
mover.apply_deletions()
1745
if self.final_file_id(self.root) is None:
1746
inventory_delta = [e for e in inventory_delta if e[0] != '']
1747
self._tree.apply_inventory_delta(inventory_delta)
1748
self._apply_observed_sha1s()
1751
return _TransformResults(modified_paths, self.rename_count)
1753
def _generate_inventory_delta(self):
1754
"""Generate an inventory delta for the current transform."""
1755
inventory_delta = []
1756
child_pb = ui.ui_factory.nested_progress_bar()
1757
new_paths = self._inventory_altered()
1758
total_entries = len(new_paths) + len(self._removed_id)
1760
for num, trans_id in enumerate(self._removed_id):
1762
child_pb.update(gettext('removing file'), num, total_entries)
1763
if trans_id == self._new_root:
1764
file_id = self._tree.get_root_id()
1766
file_id = self.tree_file_id(trans_id)
1767
# File-id isn't really being deleted, just moved
1768
if file_id in self._r_new_id:
1770
path = self._tree_id_paths[trans_id]
1771
inventory_delta.append((path, None, file_id, None))
1772
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1774
entries = self._tree.iter_entries_by_dir(
1775
new_path_file_ids.values())
1776
old_paths = dict((e.file_id, p) for p, e in entries)
1778
for num, (path, trans_id) in enumerate(new_paths):
1780
child_pb.update(gettext('adding file'),
1781
num + len(self._removed_id), total_entries)
1782
file_id = new_path_file_ids[trans_id]
1786
kind = self.final_kind(trans_id)
1788
kind = self._tree.stored_kind(file_id)
1789
parent_trans_id = self.final_parent(trans_id)
1790
parent_file_id = new_path_file_ids.get(parent_trans_id)
1791
if parent_file_id is None:
1792
parent_file_id = self.final_file_id(parent_trans_id)
1793
if trans_id in self._new_reference_revision:
1794
new_entry = inventory.TreeReference(
1796
self._new_name[trans_id],
1797
self.final_file_id(self._new_parent[trans_id]),
1798
None, self._new_reference_revision[trans_id])
1800
new_entry = inventory.make_entry(kind,
1801
self.final_name(trans_id),
1802
parent_file_id, file_id)
1803
old_path = old_paths.get(new_entry.file_id)
1804
new_executability = self._new_executability.get(trans_id)
1805
if new_executability is not None:
1806
new_entry.executable = new_executability
1807
inventory_delta.append(
1808
(old_path, path, new_entry.file_id, new_entry))
1811
return inventory_delta
1813
def _apply_removals(self, mover):
1814
"""Perform tree operations that remove directory/inventory names.
1816
That is, delete files that are to be deleted, and put any files that
1817
need renaming into limbo. This must be done in strict child-to-parent
1820
If inventory_delta is None, no inventory delta generation is performed.
1822
tree_paths = list(self._tree_path_ids.iteritems())
1823
tree_paths.sort(reverse=True)
1824
child_pb = ui.ui_factory.nested_progress_bar()
1826
for num, (path, trans_id) in enumerate(tree_paths):
1827
# do not attempt to move root into a subdirectory of itself.
1830
child_pb.update(gettext('removing file'), num, len(tree_paths))
1831
full_path = self._tree.abspath(path)
1832
if trans_id in self._removed_contents:
1833
delete_path = os.path.join(self._deletiondir, trans_id)
1834
mover.pre_delete(full_path, delete_path)
1835
elif (trans_id in self._new_name
1836
or trans_id in self._new_parent):
1838
mover.rename(full_path, self._limbo_name(trans_id))
1839
except errors.TransformRenameFailed, e:
1840
if e.errno != errno.ENOENT:
1843
self.rename_count += 1
1847
def _apply_insertions(self, mover):
1848
"""Perform tree operations that insert directory/inventory names.
1850
That is, create any files that need to be created, and restore from
1851
limbo any files that needed renaming. This must be done in strict
1852
parent-to-child order.
1854
If inventory_delta is None, no inventory delta is calculated, and
1855
no list of modified paths is returned.
1857
new_paths = self.new_paths(filesystem_only=True)
1859
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1861
child_pb = ui.ui_factory.nested_progress_bar()
1863
for num, (path, trans_id) in enumerate(new_paths):
1865
child_pb.update(gettext('adding file'), num, len(new_paths))
1866
full_path = self._tree.abspath(path)
1867
if trans_id in self._needs_rename:
1869
mover.rename(self._limbo_name(trans_id), full_path)
1870
except errors.TransformRenameFailed, e:
1871
# We may be renaming a dangling inventory id
1872
if e.errno != errno.ENOENT:
1875
self.rename_count += 1
1876
# TODO: if trans_id in self._observed_sha1s, we should
1877
# re-stat the final target, since ctime will be
1878
# updated by the change.
1879
if (trans_id in self._new_contents or
1880
self.path_changed(trans_id)):
1881
if trans_id in self._new_contents:
1882
modified_paths.append(full_path)
1883
if trans_id in self._new_executability:
1884
self._set_executability(path, trans_id)
1885
if trans_id in self._observed_sha1s:
1886
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1887
st = osutils.lstat(full_path)
1888
self._observed_sha1s[trans_id] = (o_sha1, st)
1891
for path, trans_id in new_paths:
1892
# new_paths includes stuff like workingtree conflicts. Only the
1893
# stuff in new_contents actually comes from limbo.
1894
if trans_id in self._limbo_files:
1895
del self._limbo_files[trans_id]
1896
self._new_contents.clear()
1897
return modified_paths
1899
def _apply_observed_sha1s(self):
1900
"""After we have finished renaming everything, update observed sha1s
1902
This has to be done after self._tree.apply_inventory_delta, otherwise
1903
it doesn't know anything about the files we are updating. Also, we want
1904
to do this as late as possible, so that most entries end up cached.
1906
# TODO: this doesn't update the stat information for directories. So
1907
# the first 'bzr status' will still need to rewrite
1908
# .bzr/checkout/dirstate. However, we at least don't need to
1909
# re-read all of the files.
1910
# TODO: If the operation took a while, we could do a time.sleep(3) here
1911
# to allow the clock to tick over and ensure we won't have any
1912
# problems. (we could observe start time, and finish time, and if
1913
# it is less than eg 10% overhead, add a sleep call.)
1914
paths = FinalPaths(self)
1915
for trans_id, observed in self._observed_sha1s.iteritems():
1916
path = paths.get_path(trans_id)
1917
# We could get the file_id, but dirstate prefers to use the path
1918
# anyway, and it is 'cheaper' to determine.
1919
# file_id = self._new_id[trans_id]
1920
self._tree._observed_sha1(None, path, observed)
1923
class TransformPreview(DiskTreeTransform):
1924
"""A TreeTransform for generating preview trees.
1926
Unlike TreeTransform, this version works when the input tree is a
1927
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1928
unversioned files in the input tree.
1931
def __init__(self, tree, pb=None, case_sensitive=True):
1933
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1934
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1936
def canonical_path(self, path):
1939
def tree_kind(self, trans_id):
1940
path = self._tree_id_paths.get(trans_id)
1943
kind = self._tree.path_content_summary(path)[0]
1944
if kind == 'missing':
1948
def _set_mode(self, trans_id, mode_id, typefunc):
1949
"""Set the mode of new file contents.
1950
The mode_id is the existing file to get the mode from (often the same
1951
as trans_id). The operation is only performed if there's a mode match
1952
according to typefunc.
1954
# is it ok to ignore this? probably
1957
def iter_tree_children(self, parent_id):
1958
"""Iterate through the entry's tree children, if any"""
1960
path = self._tree_id_paths[parent_id]
1963
file_id = self.tree_file_id(parent_id)
1966
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1967
children = getattr(entry, 'children', {})
1968
for child in children:
1969
childpath = joinpath(path, child)
1970
yield self.trans_id_tree_path(childpath)
1972
def new_orphan(self, trans_id, parent_id):
1973
raise NotImplementedError(self.new_orphan)
1976
class _PreviewTree(tree.InventoryTree):
1977
"""Partial implementation of Tree to support show_diff_trees"""
1979
def __init__(self, transform):
1980
self._transform = transform
1981
self._final_paths = FinalPaths(transform)
1982
self.__by_parent = None
1983
self._parent_ids = []
1984
self._all_children_cache = {}
1985
self._path2trans_id_cache = {}
1986
self._final_name_cache = {}
1987
self._iter_changes_cache = dict((c[0], c) for c in
1988
self._transform.iter_changes())
1990
def _content_change(self, file_id):
1991
"""Return True if the content of this file changed"""
1992
changes = self._iter_changes_cache.get(file_id)
1993
# changes[2] is true if the file content changed. See
1994
# InterTree.iter_changes.
1995
return (changes is not None and changes[2])
1997
def _get_repository(self):
1998
repo = getattr(self._transform._tree, '_repository', None)
2000
repo = self._transform._tree.branch.repository
2003
def _iter_parent_trees(self):
2004
for revision_id in self.get_parent_ids():
2006
yield self.revision_tree(revision_id)
2007
except errors.NoSuchRevisionInTree:
2008
yield self._get_repository().revision_tree(revision_id)
2010
def _get_file_revision(self, file_id, vf, tree_revision):
2011
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
2012
self._iter_parent_trees()]
2013
vf.add_lines((file_id, tree_revision), parent_keys,
2014
self.get_file_lines(file_id))
2015
repo = self._get_repository()
2016
base_vf = repo.texts
2017
if base_vf not in vf.fallback_versionedfiles:
2018
vf.fallback_versionedfiles.append(base_vf)
2019
return tree_revision
2021
def _stat_limbo_file(self, file_id=None, trans_id=None):
2022
if trans_id is None:
2023
trans_id = self._transform.trans_id_file_id(file_id)
2024
name = self._transform._limbo_name(trans_id)
2025
return os.lstat(name)
2028
def _by_parent(self):
2029
if self.__by_parent is None:
2030
self.__by_parent = self._transform.by_parent()
2031
return self.__by_parent
2033
def _comparison_data(self, entry, path):
2034
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2035
if kind == 'missing':
2039
file_id = self._transform.final_file_id(self._path2trans_id(path))
2040
executable = self.is_executable(file_id, path)
2041
return kind, executable, None
2043
def is_locked(self):
2046
def lock_read(self):
2047
# Perhaps in theory, this should lock the TreeTransform?
2054
def inventory(self):
2055
"""This Tree does not use inventory as its backing data."""
2056
raise NotImplementedError(_PreviewTree.inventory)
2058
def get_root_id(self):
2059
return self._transform.final_file_id(self._transform.root)
2061
def all_file_ids(self):
2062
tree_ids = set(self._transform._tree.all_file_ids())
2063
tree_ids.difference_update(self._transform.tree_file_id(t)
2064
for t in self._transform._removed_id)
2065
tree_ids.update(self._transform._new_id.values())
2069
return iter(self.all_file_ids())
2071
def _has_id(self, file_id, fallback_check):
2072
if file_id in self._transform._r_new_id:
2074
elif file_id in set([self._transform.tree_file_id(trans_id) for
2075
trans_id in self._transform._removed_id]):
2078
return fallback_check(file_id)
2080
def has_id(self, file_id):
2081
return self._has_id(file_id, self._transform._tree.has_id)
2083
def has_or_had_id(self, file_id):
2084
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2086
def _path2trans_id(self, path):
2087
# We must not use None here, because that is a valid value to store.
2088
trans_id = self._path2trans_id_cache.get(path, object)
2089
if trans_id is not object:
2091
segments = splitpath(path)
2092
cur_parent = self._transform.root
2093
for cur_segment in segments:
2094
for child in self._all_children(cur_parent):
2095
final_name = self._final_name_cache.get(child)
2096
if final_name is None:
2097
final_name = self._transform.final_name(child)
2098
self._final_name_cache[child] = final_name
2099
if final_name == cur_segment:
2103
self._path2trans_id_cache[path] = None
2105
self._path2trans_id_cache[path] = cur_parent
2108
def path2id(self, path):
2109
return self._transform.final_file_id(self._path2trans_id(path))
2111
def id2path(self, file_id):
2112
trans_id = self._transform.trans_id_file_id(file_id)
2114
return self._final_paths._determine_path(trans_id)
2116
raise errors.NoSuchId(self, file_id)
2118
def _all_children(self, trans_id):
2119
children = self._all_children_cache.get(trans_id)
2120
if children is not None:
2122
children = set(self._transform.iter_tree_children(trans_id))
2123
# children in the _new_parent set are provided by _by_parent.
2124
children.difference_update(self._transform._new_parent.keys())
2125
children.update(self._by_parent.get(trans_id, []))
2126
self._all_children_cache[trans_id] = children
2129
def iter_children(self, file_id):
2130
trans_id = self._transform.trans_id_file_id(file_id)
2131
for child_trans_id in self._all_children(trans_id):
2132
yield self._transform.final_file_id(child_trans_id)
2135
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2136
in self._transform._tree.extras())
2137
possible_extras.update(self._transform._new_contents)
2138
possible_extras.update(self._transform._removed_id)
2139
for trans_id in possible_extras:
2140
if self._transform.final_file_id(trans_id) is None:
2141
yield self._final_paths._determine_path(trans_id)
2143
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2144
yield_parents=False):
2145
for trans_id, parent_file_id in ordered_entries:
2146
file_id = self._transform.final_file_id(trans_id)
2149
if (specific_file_ids is not None
2150
and file_id not in specific_file_ids):
2152
kind = self._transform.final_kind(trans_id)
2154
kind = self._transform._tree.stored_kind(file_id)
2155
new_entry = inventory.make_entry(
2157
self._transform.final_name(trans_id),
2158
parent_file_id, file_id)
2159
yield new_entry, trans_id
2161
def _list_files_by_dir(self):
2162
todo = [ROOT_PARENT]
2164
while len(todo) > 0:
2166
parent_file_id = self._transform.final_file_id(parent)
2167
children = list(self._all_children(parent))
2168
paths = dict(zip(children, self._final_paths.get_paths(children)))
2169
children.sort(key=paths.get)
2170
todo.extend(reversed(children))
2171
for trans_id in children:
2172
ordered_ids.append((trans_id, parent_file_id))
2175
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2176
# This may not be a maximally efficient implementation, but it is
2177
# reasonably straightforward. An implementation that grafts the
2178
# TreeTransform changes onto the tree's iter_entries_by_dir results
2179
# might be more efficient, but requires tricky inferences about stack
2181
ordered_ids = self._list_files_by_dir()
2182
for entry, trans_id in self._make_inv_entries(ordered_ids,
2183
specific_file_ids, yield_parents=yield_parents):
2184
yield unicode(self._final_paths.get_path(trans_id)), entry
2186
def _iter_entries_for_dir(self, dir_path):
2187
"""Return path, entry for items in a directory without recursing down."""
2188
dir_file_id = self.path2id(dir_path)
2190
for file_id in self.iter_children(dir_file_id):
2191
trans_id = self._transform.trans_id_file_id(file_id)
2192
ordered_ids.append((trans_id, file_id))
2193
for entry, trans_id in self._make_inv_entries(ordered_ids):
2194
yield unicode(self._final_paths.get_path(trans_id)), entry
2196
def list_files(self, include_root=False, from_dir=None, recursive=True):
2197
"""See WorkingTree.list_files."""
2198
# XXX This should behave like WorkingTree.list_files, but is really
2199
# more like RevisionTree.list_files.
2203
prefix = from_dir + '/'
2204
entries = self.iter_entries_by_dir()
2205
for path, entry in entries:
2206
if entry.name == '' and not include_root:
2209
if not path.startswith(prefix):
2211
path = path[len(prefix):]
2212
yield path, 'V', entry.kind, entry.file_id, entry
2214
if from_dir is None and include_root is True:
2215
root_entry = inventory.make_entry('directory', '',
2216
ROOT_PARENT, self.get_root_id())
2217
yield '', 'V', 'directory', root_entry.file_id, root_entry
2218
entries = self._iter_entries_for_dir(from_dir or '')
2219
for path, entry in entries:
2220
yield path, 'V', entry.kind, entry.file_id, entry
2222
def kind(self, file_id):
2223
trans_id = self._transform.trans_id_file_id(file_id)
2224
return self._transform.final_kind(trans_id)
2226
def stored_kind(self, file_id):
2227
trans_id = self._transform.trans_id_file_id(file_id)
2229
return self._transform._new_contents[trans_id]
2231
return self._transform._tree.stored_kind(file_id)
2233
def get_file_mtime(self, file_id, path=None):
2234
"""See Tree.get_file_mtime"""
2235
if not self._content_change(file_id):
2236
return self._transform._tree.get_file_mtime(file_id)
2237
return self._stat_limbo_file(file_id).st_mtime
2239
def _file_size(self, entry, stat_value):
2240
return self.get_file_size(entry.file_id)
2242
def get_file_size(self, file_id):
2243
"""See Tree.get_file_size"""
2244
trans_id = self._transform.trans_id_file_id(file_id)
2245
kind = self._transform.final_kind(trans_id)
2248
if trans_id in self._transform._new_contents:
2249
return self._stat_limbo_file(trans_id=trans_id).st_size
2250
if self.kind(file_id) == 'file':
2251
return self._transform._tree.get_file_size(file_id)
2255
def get_file_verifier(self, file_id, path=None, stat_value=None):
2256
trans_id = self._transform.trans_id_file_id(file_id)
2257
kind = self._transform._new_contents.get(trans_id)
2259
return self._transform._tree.get_file_verifier(file_id)
2261
fileobj = self.get_file(file_id)
2263
return ("SHA1", sha_file(fileobj))
2267
def get_file_sha1(self, file_id, path=None, stat_value=None):
2268
trans_id = self._transform.trans_id_file_id(file_id)
2269
kind = self._transform._new_contents.get(trans_id)
2271
return self._transform._tree.get_file_sha1(file_id)
2273
fileobj = self.get_file(file_id)
2275
return sha_file(fileobj)
2279
def is_executable(self, file_id, path=None):
2282
trans_id = self._transform.trans_id_file_id(file_id)
2284
return self._transform._new_executability[trans_id]
2287
return self._transform._tree.is_executable(file_id, path)
2289
if e.errno == errno.ENOENT:
2292
except errors.NoSuchId:
2295
def has_filename(self, path):
2296
trans_id = self._path2trans_id(path)
2297
if trans_id in self._transform._new_contents:
2299
elif trans_id in self._transform._removed_contents:
2302
return self._transform._tree.has_filename(path)
2304
def path_content_summary(self, path):
2305
trans_id = self._path2trans_id(path)
2306
tt = self._transform
2307
tree_path = tt._tree_id_paths.get(trans_id)
2308
kind = tt._new_contents.get(trans_id)
2310
if tree_path is None or trans_id in tt._removed_contents:
2311
return 'missing', None, None, None
2312
summary = tt._tree.path_content_summary(tree_path)
2313
kind, size, executable, link_or_sha1 = summary
2316
limbo_name = tt._limbo_name(trans_id)
2317
if trans_id in tt._new_reference_revision:
2318
kind = 'tree-reference'
2320
statval = os.lstat(limbo_name)
2321
size = statval.st_size
2322
if not supports_executable():
2325
executable = statval.st_mode & S_IEXEC
2329
if kind == 'symlink':
2330
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2331
executable = tt._new_executability.get(trans_id, executable)
2332
return kind, size, executable, link_or_sha1
2334
def iter_changes(self, from_tree, include_unchanged=False,
2335
specific_files=None, pb=None, extra_trees=None,
2336
require_versioned=True, want_unversioned=False):
2337
"""See InterTree.iter_changes.
2339
This has a fast path that is only used when the from_tree matches
2340
the transform tree, and no fancy options are supplied.
2342
if (from_tree is not self._transform._tree or include_unchanged or
2343
specific_files or want_unversioned):
2344
return tree.InterTree(from_tree, self).iter_changes(
2345
include_unchanged=include_unchanged,
2346
specific_files=specific_files,
2348
extra_trees=extra_trees,
2349
require_versioned=require_versioned,
2350
want_unversioned=want_unversioned)
2351
if want_unversioned:
2352
raise ValueError('want_unversioned is not supported')
2353
return self._transform.iter_changes()
2355
def get_file(self, file_id, path=None):
2356
"""See Tree.get_file"""
2357
if not self._content_change(file_id):
2358
return self._transform._tree.get_file(file_id, path)
2359
trans_id = self._transform.trans_id_file_id(file_id)
2360
name = self._transform._limbo_name(trans_id)
2361
return open(name, 'rb')
2363
def get_file_with_stat(self, file_id, path=None):
2364
return self.get_file(file_id, path), None
2366
def annotate_iter(self, file_id,
2367
default_revision=_mod_revision.CURRENT_REVISION):
2368
changes = self._iter_changes_cache.get(file_id)
2372
changed_content, versioned, kind = (changes[2], changes[3],
2376
get_old = (kind[0] == 'file' and versioned[0])
2378
old_annotation = self._transform._tree.annotate_iter(file_id,
2379
default_revision=default_revision)
2383
return old_annotation
2384
if not changed_content:
2385
return old_annotation
2386
# TODO: This is doing something similar to what WT.annotate_iter is
2387
# doing, however it fails slightly because it doesn't know what
2388
# the *other* revision_id is, so it doesn't know how to give the
2389
# other as the origin for some lines, they all get
2390
# 'default_revision'
2391
# It would be nice to be able to use the new Annotator based
2392
# approach, as well.
2393
return annotate.reannotate([old_annotation],
2394
self.get_file(file_id).readlines(),
2397
def get_symlink_target(self, file_id, path=None):
2398
"""See Tree.get_symlink_target"""
2399
if not self._content_change(file_id):
2400
return self._transform._tree.get_symlink_target(file_id)
2401
trans_id = self._transform.trans_id_file_id(file_id)
2402
name = self._transform._limbo_name(trans_id)
2403
return osutils.readlink(name)
2405
def walkdirs(self, prefix=''):
2406
pending = [self._transform.root]
2407
while len(pending) > 0:
2408
parent_id = pending.pop()
2411
prefix = prefix.rstrip('/')
2412
parent_path = self._final_paths.get_path(parent_id)
2413
parent_file_id = self._transform.final_file_id(parent_id)
2414
for child_id in self._all_children(parent_id):
2415
path_from_root = self._final_paths.get_path(child_id)
2416
basename = self._transform.final_name(child_id)
2417
file_id = self._transform.final_file_id(child_id)
2418
kind = self._transform.final_kind(child_id)
2419
if kind is not None:
2420
versioned_kind = kind
2423
versioned_kind = self._transform._tree.stored_kind(file_id)
2424
if versioned_kind == 'directory':
2425
subdirs.append(child_id)
2426
children.append((path_from_root, basename, kind, None,
2427
file_id, versioned_kind))
2429
if parent_path.startswith(prefix):
2430
yield (parent_path, parent_file_id), children
2431
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2434
def get_parent_ids(self):
2435
return self._parent_ids
2437
def set_parent_ids(self, parent_ids):
2438
self._parent_ids = parent_ids
2440
def get_revision_tree(self, revision_id):
2441
return self._transform._tree.get_revision_tree(revision_id)
864
2444
def joinpath(parent, child):
865
2445
"""Join tree-relative paths, handling the tree root specially"""
866
2446
if parent is None or parent == "":
896
2476
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2477
return self._known_paths[trans_id]
2479
def get_paths(self, trans_ids):
2480
return [(self.get_path(t), t) for t in trans_ids]
899
2484
def topology_sorted_ids(tree):
900
2485
"""Determine the topological order of the ids in a tree"""
901
2486
file_ids = list(tree)
902
2487
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2491
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2492
delta_from_tree=False):
2493
"""Create working tree for a branch, using a TreeTransform.
2495
This function should be used on empty trees, having a tree root at most.
2496
(see merge and revert functionality for working with existing trees)
2498
Existing files are handled like so:
2500
- Existing bzrdirs take precedence over creating new items. They are
2501
created as '%s.diverted' % name.
2502
- Otherwise, if the content on disk matches the content we are building,
2503
it is silently replaced.
2504
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2506
:param tree: The tree to convert wt into a copy of
2507
:param wt: The working tree that files will be placed into
2508
:param accelerator_tree: A tree which can be used for retrieving file
2509
contents more quickly than tree itself, i.e. a workingtree. tree
2510
will be used for cases where accelerator_tree's content is different.
2511
:param hardlink: If true, hard-link files to accelerator_tree, where
2512
possible. accelerator_tree must implement abspath, i.e. be a
2514
:param delta_from_tree: If true, build_tree may use the input Tree to
2515
generate the inventory delta.
2517
wt.lock_tree_write()
2521
if accelerator_tree is not None:
2522
accelerator_tree.lock_read()
2524
return _build_tree(tree, wt, accelerator_tree, hardlink,
2527
if accelerator_tree is not None:
2528
accelerator_tree.unlock()
2535
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2536
"""See build_tree."""
2537
for num, _unused in enumerate(wt.all_file_ids()):
2538
if num > 0: # more than just a root
2539
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2540
file_trans_id = {}
908
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2541
top_pb = ui.ui_factory.nested_progress_bar()
909
2542
pp = ProgressPhase("Build phase", 2, top_pb)
2543
if tree.get_root_id() is not None:
2544
# This is kind of a hack: we should be altering the root
2545
# as part of the regular tree shape diff logic.
2546
# The conditional test here is to avoid doing an
2547
# expensive operation (flush) every time the root id
2548
# is set within the tree, nor setting the root and thus
2549
# marking the tree as dirty, because we use two different
2550
# idioms here: tree interfaces and inventory interfaces.
2551
if wt.get_root_id() != tree.get_root_id():
2552
wt.set_root_id(tree.get_root_id())
910
2554
tt = TreeTransform(wt)
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
915
pb = bzrlib.ui.ui_factory.nested_progress_bar()
2558
file_trans_id[wt.get_root_id()] = \
2559
tt.trans_id_tree_file_id(wt.get_root_id())
2560
pb = ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2562
deferred_contents = []
2564
total = len(tree.all_file_ids())
2566
precomputed_delta = []
2568
precomputed_delta = None
2569
# Check if tree inventory has content. If so, we populate
2570
# existing_files with the directory content. If there are no
2571
# entries we skip populating existing_files as its not used.
2572
# This improves performance and unncessary work on large
2573
# directory trees. (#501307)
2575
existing_files = set()
2576
for dir, files in wt.walkdirs():
2577
existing_files.update(f[0] for f in files)
2578
for num, (tree_path, entry) in \
2579
enumerate(tree.iter_entries_by_dir()):
2580
pb.update(gettext("Building tree"), num - len(deferred_contents), total)
920
2581
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2584
file_id = entry.file_id
2586
precomputed_delta.append((None, tree_path, file_id, entry))
2587
if tree_path in existing_files:
2588
target_path = wt.abspath(tree_path)
2589
kind = file_kind(target_path)
2590
if kind == "directory":
2592
controldir.ControlDir.open(target_path)
2593
except errors.NotBranchError:
2597
if (file_id not in divert and
2598
_content_match(tree, entry, file_id, kind,
2600
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2601
if kind == 'directory':
924
2603
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2604
if entry.kind == 'file':
2605
# We *almost* replicate new_by_entry, so that we can defer
2606
# getting the file text, and get them all at once.
2607
trans_id = tt.create_path(entry.name, parent_id)
2608
file_trans_id[file_id] = trans_id
2609
tt.version_file(file_id, trans_id)
2610
executable = tree.is_executable(file_id, tree_path)
2612
tt.set_executability(executable, trans_id)
2613
trans_data = (trans_id, tree_path, entry.text_sha1)
2614
deferred_contents.append((file_id, trans_data))
2616
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2619
new_trans_id = file_trans_id[file_id]
2620
old_parent = tt.trans_id_tree_path(tree_path)
2621
_reparent_children(tt, old_parent, new_trans_id)
2622
offset = num + 1 - len(deferred_contents)
2623
_create_files(tt, tree, deferred_contents, pb, offset,
2624
accelerator_tree, hardlink)
2628
divert_trans = set(file_trans_id[f] for f in divert)
2629
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2630
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2631
if len(raw_conflicts) > 0:
2632
precomputed_delta = None
2633
conflicts = cook_conflicts(raw_conflicts, tt)
2634
for conflict in conflicts:
2635
trace.warning(unicode(conflict))
2637
wt.add_conflicts(conflicts)
2638
except errors.UnsupportedOperation:
2640
result = tt.apply(no_conflicts=True,
2641
precomputed_delta=precomputed_delta)
933
2644
top_pb.finished()
2648
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2650
total = len(desired_files) + offset
2652
if accelerator_tree is None:
2653
new_desired_files = desired_files
2655
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2656
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2657
in iter if not (c or e[0] != e[1])]
2658
if accelerator_tree.supports_content_filtering():
2659
unchanged = [(f, p) for (f, p) in unchanged
2660
if not accelerator_tree.iter_search_rules([p]).next()]
2661
unchanged = dict(unchanged)
2662
new_desired_files = []
2664
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2665
accelerator_path = unchanged.get(file_id)
2666
if accelerator_path is None:
2667
new_desired_files.append((file_id,
2668
(trans_id, tree_path, text_sha1)))
2670
pb.update(gettext('Adding file contents'), count + offset, total)
2672
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2675
contents = accelerator_tree.get_file(file_id, accelerator_path)
2676
if wt.supports_content_filtering():
2677
filters = wt._content_filter_stack(tree_path)
2678
contents = filtered_output_bytes(contents, filters,
2679
ContentFilterContext(tree_path, tree))
2681
tt.create_file(contents, trans_id, sha1=text_sha1)
2685
except AttributeError:
2686
# after filtering, contents may no longer be file-like
2690
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2691
tree.iter_files_bytes(new_desired_files)):
2692
if wt.supports_content_filtering():
2693
filters = wt._content_filter_stack(tree_path)
2694
contents = filtered_output_bytes(contents, filters,
2695
ContentFilterContext(tree_path, tree))
2696
tt.create_file(contents, trans_id, sha1=text_sha1)
2697
pb.update(gettext('Adding file contents'), count + offset, total)
2700
def _reparent_children(tt, old_parent, new_parent):
2701
for child in tt.iter_tree_children(old_parent):
2702
tt.adjust_path(tt.final_name(child), new_parent, child)
2705
def _reparent_transform_children(tt, old_parent, new_parent):
2706
by_parent = tt.by_parent()
2707
for child in by_parent[old_parent]:
2708
tt.adjust_path(tt.final_name(child), new_parent, child)
2709
return by_parent[old_parent]
2712
def _content_match(tree, entry, file_id, kind, target_path):
2713
if entry.kind != kind:
2715
if entry.kind == "directory":
2717
if entry.kind == "file":
2718
f = file(target_path, 'rb')
2720
if tree.get_file_text(file_id) == f.read():
2724
elif entry.kind == "symlink":
2725
if tree.get_symlink_target(file_id) == os.readlink(target_path):
2730
def resolve_checkout(tt, conflicts, divert):
2731
new_conflicts = set()
2732
for c_type, conflict in ((c[0], c) for c in conflicts):
2733
# Anything but a 'duplicate' would indicate programmer error
2734
if c_type != 'duplicate':
2735
raise AssertionError(c_type)
2736
# Now figure out which is new and which is old
2737
if tt.new_contents(conflict[1]):
2738
new_file = conflict[1]
2739
old_file = conflict[2]
2741
new_file = conflict[2]
2742
old_file = conflict[1]
2744
# We should only get here if the conflict wasn't completely
2746
final_parent = tt.final_parent(old_file)
2747
if new_file in divert:
2748
new_name = tt.final_name(old_file)+'.diverted'
2749
tt.adjust_path(new_name, final_parent, new_file)
2750
new_conflicts.add((c_type, 'Diverted to',
2751
new_file, old_file))
2753
new_name = tt.final_name(old_file)+'.moved'
2754
tt.adjust_path(new_name, final_parent, old_file)
2755
new_conflicts.add((c_type, 'Moved existing file to',
2756
old_file, new_file))
2757
return new_conflicts
935
2760
def new_by_entry(tt, entry, parent_id, tree):
936
2761
"""Create a new file according to its inventory entry"""
964
2814
tt.set_executability(entry.executable, trans_id)
967
def find_interesting(working_tree, target_tree, filenames):
968
"""Find the ids corresponding to specified filenames."""
970
interesting_ids = None
972
interesting_ids = set()
973
for tree_path in filenames:
975
for tree in (working_tree, target_tree):
976
file_id = tree.inventory.path2id(tree_path)
977
if file_id is not None:
978
interesting_ids.add(file_id)
981
raise NotVersionedError(path=tree_path)
982
return interesting_ids
985
def change_entry(tt, file_id, working_tree, target_tree,
986
trans_id_file_id, backups, trans_id, by_parent):
987
"""Replace a file_id's contents with those from a target tree."""
988
e_trans_id = trans_id_file_id(file_id)
989
entry = target_tree.inventory[file_id]
990
has_contents, contents_mod, meta_mod, = _entry_changes(file_id, entry,
996
tt.delete_contents(e_trans_id)
998
parent_trans_id = trans_id_file_id(entry.parent_id)
999
backup_name = get_backup_name(entry, by_parent,
1000
parent_trans_id, tt)
1001
tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
1002
tt.unversion_file(e_trans_id)
1003
e_trans_id = tt.create_path(entry.name, parent_trans_id)
1004
tt.version_file(file_id, e_trans_id)
1005
trans_id[file_id] = e_trans_id
1006
create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
1007
create_entry_executability(tt, entry, e_trans_id)
1010
tt.set_executability(entry.executable, e_trans_id)
1011
if tt.final_name(e_trans_id) != entry.name:
1014
parent_id = tt.final_parent(e_trans_id)
1015
parent_file_id = tt.final_file_id(parent_id)
1016
if parent_file_id != entry.parent_id:
1021
parent_trans_id = trans_id_file_id(entry.parent_id)
1022
tt.adjust_path(entry.name, parent_trans_id, e_trans_id)
2817
@deprecated_function(deprecated_in((2, 3, 0)))
1025
2818
def get_backup_name(entry, by_parent, parent_trans_id, tt):
2819
return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)
2822
@deprecated_function(deprecated_in((2, 3, 0)))
2823
def _get_backup_name(name, by_parent, parent_trans_id, tt):
1026
2824
"""Produce a backup-style name that appears to be available"""
1027
2825
def name_gen():
1030
yield "%s.~%d~" % (entry.name, counter)
2828
yield "%s.~%d~" % (name, counter)
1032
for name in name_gen():
1033
if not tt.has_named_child(by_parent, parent_trans_id, name):
1036
def _entry_changes(file_id, entry, working_tree):
1037
"""Determine in which ways the inventory entry has changed.
1039
Returns booleans: has_contents, content_mod, meta_mod
1040
has_contents means there are currently contents, but they differ
1041
contents_mod means contents need to be modified
1042
meta_mod means the metadata needs to be modified
1044
cur_entry = working_tree.inventory[file_id]
1046
working_kind = working_tree.kind(file_id)
1049
if e.errno != errno.ENOENT:
1051
has_contents = False
1054
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
1059
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
cur_entry._forget_tree_state()
1065
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2830
for new_name in name_gen():
2831
if not tt.has_named_child(by_parent, parent_trans_id, new_name):
2835
def revert(working_tree, target_tree, filenames, backups=False,
2836
pb=None, change_reporter=None):
1070
2837
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
2838
target_tree.lock_read()
2839
pb = ui.ui_factory.nested_progress_bar()
1075
2840
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2842
pp = ProgressPhase("Revert phase", 3, pb)
2843
conflicts, merge_modified = _prepare_revert_transform(
2844
working_tree, target_tree, tt, filenames, backups, pp)
2846
change_reporter = delta._ChangeReporter(
2847
unversioned_filter=working_tree.is_ignored)
2848
delta.report_changes(tt.iter_changes(), change_reporter)
2849
for conflict in conflicts:
2850
trace.warning(unicode(conflict))
2853
working_tree.set_merge_modified(merge_modified)
2855
target_tree.unlock()
2861
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2862
backups, pp, basis_tree=None,
2863
merge_modified=None):
2864
child_pb = ui.ui_factory.nested_progress_bar()
2866
if merge_modified is None:
2867
merge_modified = working_tree.merge_modified()
2868
merge_modified = _alter_files(working_tree, target_tree, tt,
2869
child_pb, filenames, backups,
2870
merge_modified, basis_tree)
2873
child_pb = ui.ui_factory.nested_progress_bar()
2875
raw_conflicts = resolve_conflicts(tt, child_pb,
2876
lambda t, c: conflict_pass(t, c, target_tree))
2879
conflicts = cook_conflicts(raw_conflicts, tt)
2880
return conflicts, merge_modified
2883
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2884
backups, merge_modified, basis_tree=None):
2885
if basis_tree is not None:
2886
basis_tree.lock_read()
2887
# We ask the working_tree for its changes relative to the target, rather
2888
# than the target changes relative to the working tree. Because WT4 has an
2889
# optimizer to compare itself to a target, but no optimizer for the
2891
change_list = working_tree.iter_changes(target_tree,
2892
specific_files=specific_files, pb=pb)
2893
if target_tree.get_root_id() is None:
2899
for id_num, (file_id, path, changed_content, versioned, parent, name,
2900
kind, executable) in enumerate(change_list):
2901
target_path, wt_path = path
2902
target_versioned, wt_versioned = versioned
2903
target_parent, wt_parent = parent
2904
target_name, wt_name = name
2905
target_kind, wt_kind = kind
2906
target_executable, wt_executable = executable
2907
if skip_root and wt_parent is None:
2909
trans_id = tt.trans_id_file_id(file_id)
2912
keep_content = False
2913
if wt_kind == 'file' and (backups or target_kind is None):
2914
wt_sha1 = working_tree.get_file_sha1(file_id)
2915
if merge_modified.get(file_id) != wt_sha1:
2916
# acquire the basis tree lazily to prevent the
2917
# expense of accessing it when it's not needed ?
2918
# (Guessing, RBC, 200702)
2919
if basis_tree is None:
2920
basis_tree = working_tree.basis_tree()
2921
basis_tree.lock_read()
2922
if basis_tree.has_id(file_id):
2923
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2925
elif target_kind is None and not target_versioned:
2927
if wt_kind is not None:
2928
if not keep_content:
1121
2929
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2930
elif target_kind is not None:
2931
parent_trans_id = tt.trans_id_file_id(wt_parent)
2932
backup_name = tt._available_backup_name(
2933
wt_name, parent_trans_id)
2934
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2935
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2936
if wt_versioned and target_versioned:
2937
tt.unversion_file(trans_id)
2938
tt.version_file(file_id, new_trans_id)
2939
# New contents should have the same unix perms as old
2942
trans_id = new_trans_id
2943
if target_kind in ('directory', 'tree-reference'):
2944
tt.create_directory(trans_id)
2945
if target_kind == 'tree-reference':
2946
revision = target_tree.get_reference_revision(file_id,
2948
tt.set_tree_reference(revision, trans_id)
2949
elif target_kind == 'symlink':
2950
tt.create_symlink(target_tree.get_symlink_target(file_id),
2952
elif target_kind == 'file':
2953
deferred_files.append((file_id, (trans_id, mode_id)))
2954
if basis_tree is None:
2955
basis_tree = working_tree.basis_tree()
2956
basis_tree.lock_read()
2957
new_sha1 = target_tree.get_file_sha1(file_id)
2958
if (basis_tree.has_id(file_id) and
2959
new_sha1 == basis_tree.get_file_sha1(file_id)):
2960
if file_id in merge_modified:
2961
del merge_modified[file_id]
2963
merge_modified[file_id] = new_sha1
2965
# preserve the execute bit when backing up
2966
if keep_content and wt_executable == target_executable:
2967
tt.set_executability(target_executable, trans_id)
2968
elif target_kind is not None:
2969
raise AssertionError(target_kind)
2970
if not wt_versioned and target_versioned:
2971
tt.version_file(file_id, trans_id)
2972
if wt_versioned and not target_versioned:
2973
tt.unversion_file(trans_id)
2974
if (target_name is not None and
2975
(wt_name != target_name or wt_parent != target_parent)):
2976
if target_name == '' and target_parent is None:
2977
parent_trans = ROOT_PARENT
2979
parent_trans = tt.trans_id_file_id(target_parent)
2980
if wt_parent is None and wt_versioned:
2981
tt.adjust_root_path(target_name, parent_trans)
2983
tt.adjust_path(target_name, parent_trans, trans_id)
2984
if wt_executable != target_executable and target_kind == "file":
2985
tt.set_executability(target_executable, trans_id)
2986
if working_tree.supports_content_filtering():
2987
for index, ((trans_id, mode_id), bytes) in enumerate(
2988
target_tree.iter_files_bytes(deferred_files)):
2989
file_id = deferred_files[index][0]
2990
# We're reverting a tree to the target tree so using the
2991
# target tree to find the file path seems the best choice
2992
# here IMO - Ian C 27/Oct/2009
2993
filter_tree_path = target_tree.id2path(file_id)
2994
filters = working_tree._content_filter_stack(filter_tree_path)
2995
bytes = filtered_output_bytes(bytes, filters,
2996
ContentFilterContext(filter_tree_path, working_tree))
2997
tt.create_file(bytes, trans_id, mode_id)
2999
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
3001
tt.create_file(bytes, trans_id, mode_id)
3002
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
3004
if basis_tree is not None:
3006
return merge_modified
3009
def resolve_conflicts(tt, pb=None, pass_func=None):
1144
3010
"""Make many conflict-resolution attempts, but die if they fail"""
3011
if pass_func is None:
3012
pass_func = conflict_pass
1145
3013
new_conflicts = set()
3014
pb = ui.ui_factory.nested_progress_bar()
1147
3016
for n in range(10):
1148
pb.update('Resolution pass', n+1, 10)
3017
pb.update(gettext('Resolution pass'), n+1, 10)
1149
3018
conflicts = tt.find_conflicts()
1150
3019
if len(conflicts) == 0:
1151
3020
return new_conflicts
1152
new_conflicts.update(conflict_pass(tt, conflicts))
3021
new_conflicts.update(pass_func(tt, conflicts))
1153
3022
raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
3027
def conflict_pass(tt, conflicts, path_tree=None):
3028
"""Resolve some classes of conflicts.
3030
:param tt: The transform to resolve conflicts in
3031
:param conflicts: The conflicts to resolve
3032
:param path_tree: A Tree to get supplemental paths from
1160
3034
new_conflicts = set()
1161
3035
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
3036
if c_type == 'duplicate id':