861
830
self.create_symlink(target, trans_id)
833
def new_orphan(self, trans_id, parent_id):
    """Schedule an item to be orphaned.

    When a directory is about to be removed, its children, if they are not
    versioned are moved out of the way: they don't have a parent anymore.

    :param trans_id: The trans_id of the existing item.
    :param parent_id: The parent trans_id of the item.
    :raises NotImplementedError: always; concrete transforms override this.
    """
    raise NotImplementedError(self.new_orphan)
844
def _get_potential_orphans(self, dir_id):
845
"""Find the potential orphans in a directory.
847
A directory can't be safely deleted if there are versioned files in it.
848
If all the contained files are unversioned then they can be orphaned.
850
The 'None' return value means that the directory contains at least one
851
versioned file and should not be deleted.
853
:param dir_id: The directory trans id.
855
:return: A list of the orphan trans ids or None if at least one
856
versioned file is present.
859
# Find the potential orphans, stop if one item should be kept
860
for child_tid in self.by_parent()[dir_id]:
861
if child_tid in self._removed_contents:
862
# The child is removed as part of the transform. Since it was
863
# versioned before, it's not an orphan
865
elif self.final_file_id(child_tid) is None:
866
# The child is not versioned
867
orphans.append(child_tid)
869
# We have a versioned file here, searching for orphans is
875
def _affected_ids(self):
876
"""Return the set of transform ids affected by the transform"""
877
trans_ids = set(self._removed_id)
878
trans_ids.update(self._new_id.keys())
879
trans_ids.update(self._removed_contents)
880
trans_ids.update(self._new_contents.keys())
881
trans_ids.update(self._new_executability.keys())
882
trans_ids.update(self._new_name.keys())
883
trans_ids.update(self._new_parent.keys())
886
def _get_file_id_maps(self):
887
"""Return mapping of file_ids to trans_ids in the to and from states"""
888
trans_ids = self._affected_ids()
891
# Build up two dicts: trans_ids associated with file ids in the
892
# FROM state, vs the TO state.
893
for trans_id in trans_ids:
894
from_file_id = self.tree_file_id(trans_id)
895
if from_file_id is not None:
896
from_trans_ids[from_file_id] = trans_id
897
to_file_id = self.final_file_id(trans_id)
898
if to_file_id is not None:
899
to_trans_ids[to_file_id] = trans_id
900
return from_trans_ids, to_trans_ids
902
def _from_file_data(self, from_trans_id, from_versioned, file_id):
903
"""Get data about a file in the from (tree) state
905
Return a (name, parent, kind, executable) tuple
907
from_path = self._tree_id_paths.get(from_trans_id)
909
# get data from working tree if versioned
910
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
911
from_name = from_entry.name
912
from_parent = from_entry.parent_id
915
if from_path is None:
916
# File does not exist in FROM state
920
# File exists, but is not versioned. Have to use path-
922
from_name = os.path.basename(from_path)
923
tree_parent = self.get_tree_parent(from_trans_id)
924
from_parent = self.tree_file_id(tree_parent)
925
if from_path is not None:
926
from_kind, from_executable, from_stats = \
927
self._tree._comparison_data(from_entry, from_path)
930
from_executable = False
931
return from_name, from_parent, from_kind, from_executable
933
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
934
"""Get data about a file in the to (target) state
936
Return a (name, parent, kind, executable) tuple
938
to_name = self.final_name(to_trans_id)
939
to_kind = self.final_kind(to_trans_id)
940
to_parent = self.final_file_id(self.final_parent(to_trans_id))
941
if to_trans_id in self._new_executability:
942
to_executable = self._new_executability[to_trans_id]
943
elif to_trans_id == from_trans_id:
944
to_executable = from_executable
946
to_executable = False
947
return to_name, to_parent, to_kind, to_executable
949
def iter_changes(self):
    """Produce output in the same format as Tree.iter_changes.

    Will produce nonsensical results if invoked while inventory/filesystem
    conflicts (as reported by TreeTransform.find_conflicts()) are present.

    This reads the Transform, but only reproduces changes involving a
    file_id.  Files that are not versioned in either of the FROM or TO
    states are not reflected.

    NOTE(review): reconstructed from a garbled extraction (several dropped
    branch lines); verify against upstream history.
    """
    final_paths = FinalPaths(self)
    from_trans_ids, to_trans_ids = self._get_file_id_maps()
    results = []
    # Now iterate through all active file_ids
    for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
        modified = False
        from_trans_id = from_trans_ids.get(file_id)
        # find file ids, and determine versioning state
        if from_trans_id is None:
            from_versioned = False
            from_trans_id = to_trans_ids[file_id]
        else:
            from_versioned = True
        to_trans_id = to_trans_ids.get(file_id)
        if to_trans_id is None:
            to_versioned = False
            to_trans_id = from_trans_id
        else:
            to_versioned = True

        from_name, from_parent, from_kind, from_executable = \
            self._from_file_data(from_trans_id, from_versioned, file_id)

        to_name, to_parent, to_kind, to_executable = \
            self._to_file_data(to_trans_id, from_trans_id, from_executable)

        if not from_versioned:
            from_path = None
        else:
            from_path = self._tree_id_paths.get(from_trans_id)
        if not to_versioned:
            to_path = None
        else:
            to_path = final_paths.get_path(to_trans_id)
        # Content is modified when the kind changed, or when new contents
        # were supplied (or the file moved) for a file/symlink.
        if from_kind != to_kind:
            modified = True
        elif to_kind in ('file', 'symlink') and (
            to_trans_id != from_trans_id or
            to_trans_id in self._new_contents):
            modified = True
        if (not modified and from_versioned == to_versioned and
            from_parent == to_parent and from_name == to_name and
            from_executable == to_executable):
            # Completely unchanged entries are not reported.
            continue
        results.append((file_id, (from_path, to_path), modified,
               (from_versioned, to_versioned),
               (from_parent, to_parent),
               (from_name, to_name),
               (from_kind, to_kind),
               (from_executable, to_executable)))
    return iter(sorted(results, key=lambda x: x[1]))
1011
def get_preview_tree(self):
    """Return a tree representing the result of the transform.

    The tree is a snapshot, and altering the TreeTransform will invalidate
    it.
    """
    return _PreviewTree(self)
1019
def commit(self, branch, message, merge_parents=None, strict=False,
           timestamp=None, timezone=None, committer=None, authors=None,
           revprops=None, revision_id=None):
    """Commit the result of this TreeTransform to a branch.

    :param branch: The branch to commit to.
    :param message: The message to attach to the commit.
    :param merge_parents: Additional parent revision-ids specified by
        pending merges.
    :param strict: If True, abort the commit if there are unversioned
        files.
    :param timestamp: if not None, seconds-since-epoch for the time and
        date.  (May be a float.)
    :param timezone: Optional timezone for timestamp, as an offset in
        seconds.
    :param committer: Optional committer in email-id format.
        (e.g. "J Random Hacker <jrandom@example.com>")
    :param authors: Optional list of authors in email-id format.
    :param revprops: Optional dictionary of revision properties.
    :param revision_id: Optional revision id.  (Specifying a revision-id
        may reduce performance for some non-native formats.)
    :return: The revision_id of the revision committed.

    NOTE(review): reconstructed from a garbled extraction (the strict
    guard, parent-list setup and return were dropped); verify against
    upstream history.
    """
    self._check_malformed()
    if strict:
        unversioned = set(self._new_contents).difference(set(self._new_id))
        for trans_id in unversioned:
            if self.final_file_id(trans_id) is None:
                raise errors.StrictCommitFailed()

    revno, last_rev_id = branch.last_revision_info()
    if last_rev_id == _mod_revision.NULL_REVISION:
        if merge_parents is not None:
            raise ValueError('Cannot supply merge parents for first'
                             ' commit.')
        parent_ids = []
    else:
        parent_ids = [last_rev_id]
        if merge_parents is not None:
            parent_ids.extend(merge_parents)
    if self._tree.get_revision_id() != last_rev_id:
        raise ValueError('TreeTransform not based on branch basis: %s' %
                         self._tree.get_revision_id())
    revprops = commit.Commit.update_revprops(revprops, branch, authors)
    builder = branch.get_commit_builder(parent_ids,
                                        timestamp=timestamp,
                                        timezone=timezone,
                                        committer=committer,
                                        revprops=revprops,
                                        revision_id=revision_id)
    preview = self.get_preview_tree()
    list(builder.record_iter_changes(preview, last_rev_id,
                                     self.iter_changes()))
    builder.finish_inventory()
    revision_id = builder.commit(message)
    branch.set_last_revision_info(revno + 1, revision_id)
    return revision_id
1077
def _text_parent(self, trans_id):
1078
file_id = self.tree_file_id(trans_id)
1080
if file_id is None or self._tree.kind(file_id) != 'file':
1082
except errors.NoSuchFile:
1086
def _get_parents_texts(self, trans_id):
1087
"""Get texts for compression parents of this file."""
1088
file_id = self._text_parent(trans_id)
1091
return (self._tree.get_file_text(file_id),)
1093
def _get_parents_lines(self, trans_id):
1094
"""Get lines for compression parents of this file."""
1095
file_id = self._text_parent(trans_id)
1098
return (self._tree.get_file_lines(file_id),)
1100
def serialize(self, serializer):
    """Serialize this TreeTransform.

    :param serializer: A Serialiser like pack.ContainerSerializer.

    NOTE(review): reconstructed from a garbled extraction (the attribs
    dict literal and the per-kind content branches were partially
    dropped); verify against upstream history.
    """
    new_name = dict((k, v.encode('utf-8')) for k, v in
                    self._new_name.items())
    new_executability = dict((k, int(v)) for k, v in
                             self._new_executability.items())
    tree_path_ids = dict((k.encode('utf-8'), v)
                         for k, v in self._tree_path_ids.items())
    attribs = {
        '_id_number': self._id_number,
        '_new_name': new_name,
        '_new_parent': self._new_parent,
        '_new_executability': new_executability,
        '_new_id': self._new_id,
        '_tree_path_ids': tree_path_ids,
        '_removed_id': list(self._removed_id),
        '_removed_contents': list(self._removed_contents),
        '_non_present_ids': self._non_present_ids,
        }
    yield serializer.bytes_record(bencode.bencode(attribs),
                                  (('attribs',),))
    for trans_id, kind in self._new_contents.items():
        if kind == 'file':
            # Store file contents as a multiparent diff against parents.
            lines = osutils.chunks_to_lines(
                self._read_file_chunks(trans_id))
            parents = self._get_parents_lines(trans_id)
            mpdiff = multiparent.MultiParent.from_lines(lines, parents)
            content = ''.join(mpdiff.to_patch())
        if kind == 'directory':
            content = ''
        if kind == 'symlink':
            content = self._read_symlink_target(trans_id)
        yield serializer.bytes_record(content, ((trans_id, kind),))
1137
def deserialize(self, records):
    """Deserialize a stored TreeTransform.

    :param records: An iterable of (names, content) tuples, as per
        pack.ContainerPushParser.
    """
    names, content = records.next()
    attribs = bencode.bdecode(content)
    self._id_number = attribs['_id_number']
    self._new_name = dict((k, v.decode('utf-8'))
                          for k, v in attribs['_new_name'].items())
    self._new_parent = attribs['_new_parent']
    self._new_executability = dict((k, bool(v)) for k, v in
        attribs['_new_executability'].items())
    self._new_id = attribs['_new_id']
    self._r_new_id = dict((v, k) for k, v in self._new_id.items())
    self._tree_path_ids = {}
    self._tree_id_paths = {}
    for bytepath, trans_id in attribs['_tree_path_ids'].items():
        path = bytepath.decode('utf-8')
        self._tree_path_ids[path] = trans_id
        self._tree_id_paths[trans_id] = path
    self._removed_id = set(attribs['_removed_id'])
    self._removed_contents = set(attribs['_removed_contents'])
    self._non_present_ids = attribs['_non_present_ids']
    for ((trans_id, kind),), content in records:
        if kind == 'file':
            mpdiff = multiparent.MultiParent.from_patch(content)
            lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
            self.create_file(lines, trans_id)
        if kind == 'directory':
            self.create_directory(trans_id)
        if kind == 'symlink':
            self.create_symlink(content.decode('utf-8'), trans_id)
1173
class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
        self._limbodir = limbodir
        self._deletiondir = None
        # A mapping of transform ids to their limbo filename
        self._limbo_files = {}
        self._possibly_stale_limbo_files = set()
        # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
        self._limbo_children = {}
        # Map transform ids to maps of child filename to child transform id
        self._limbo_children_names = {}
        # List of transform ids that need to be renamed from limbo into place
        self._needs_rename = set()
        self._creation_mtime = None
1203
"""Release the working tree lock, if held, clean up limbo dir.
1205
This is required if apply has not been invoked, but can be invoked
1208
if self._tree is None:
1211
limbo_paths = self._limbo_files.values() + list(
1212
self._possibly_stale_limbo_files)
1213
limbo_paths = sorted(limbo_paths, reverse=True)
1214
for path in limbo_paths:
1218
if e.errno != errno.ENOENT:
1220
# XXX: warn? perhaps we just got interrupted at an
1221
# inconvenient moment, but perhaps files are disappearing
1224
delete_any(self._limbodir)
1226
# We don't especially care *why* the dir is immortal.
1227
raise ImmortalLimbo(self._limbodir)
1229
if self._deletiondir is not None:
1230
delete_any(self._deletiondir)
1232
raise errors.ImmortalPendingDeletion(self._deletiondir)
1234
TreeTransformBase.finalize(self)
1236
def _limbo_name(self, trans_id):
1237
"""Generate the limbo name of a file"""
1238
limbo_name = self._limbo_files.get(trans_id)
1239
if limbo_name is None:
1240
limbo_name = self._generate_limbo_path(trans_id)
1241
self._limbo_files[trans_id] = limbo_name
1244
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the trans_id as the relative path.

    This is suitable as a fallback, and when the transform should not be
    sensitive to the path encoding of the limbo directory.
    """
    # A trans_id-named limbo file always needs a final rename into place.
    self._needs_rename.add(trans_id)
    return pathjoin(self._limbodir, trans_id)
1253
def adjust_path(self, name, parent, trans_id):
    """Change the path that is assigned to a transaction id.

    Keeps the limbo directory bookkeeping in sync when the file was
    created directly at its (previously) final limbo location.
    """
    previous_parent = self._new_parent.get(trans_id)
    previous_name = self._new_name.get(trans_id)
    TreeTransformBase.adjust_path(self, name, parent, trans_id)
    if (trans_id in self._limbo_files and
        trans_id not in self._needs_rename):
        self._rename_in_limbo([trans_id])
        if previous_parent != parent:
            self._limbo_children[previous_parent].remove(trans_id)
        if previous_parent != parent or previous_name != name:
            del self._limbo_children_names[previous_parent][previous_name]
1265
def _rename_in_limbo(self, trans_ids):
    """Fix limbo names so that the right final path is produced.

    This means we outsmarted ourselves-- we tried to avoid renaming
    these files later by creating them with their final names in their
    final parents.  But now the previous name or parent is no longer
    suitable, so we have to rename them.

    Even for trans_ids that have no new contents, we must remove their
    entries from _limbo_files, because they are now stale.
    """
    for trans_id in trans_ids:
        old_path = self._limbo_files[trans_id]
        self._possibly_stale_limbo_files.add(old_path)
        del self._limbo_files[trans_id]
        if trans_id not in self._new_contents:
            continue
        new_path = self._limbo_name(trans_id)
        os.rename(old_path, new_path)
        self._possibly_stale_limbo_files.remove(old_path)
        # Children placed inside this limbo directory move with it.
        for descendant in self._limbo_descendants(trans_id):
            desc_path = self._limbo_files[descendant]
            desc_path = new_path + desc_path[len(old_path):]
            self._limbo_files[descendant] = desc_path
1290
def _limbo_descendants(self, trans_id):
1291
"""Return the set of trans_ids whose limbo paths descend from this."""
1292
descendants = set(self._limbo_children.get(trans_id, []))
1293
for descendant in list(descendants):
1294
descendants.update(self._limbo_descendants(descendant))
1297
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
    """Schedule creation of a new file.

    :param contents: an iterator of strings, all of which will be written
        to the target destination.
    :param trans_id: TreeTransform handle
    :param mode_id: If not None, force the mode of the target file to match
        the mode of the object referenced by mode_id.
        Otherwise, we will try to preserve mode bits of an existing file.
    :param sha1: If the sha1 of this content is already known, pass it in.
        We can use it to prevent future sha1 computations.
    """
    name = self._limbo_name(trans_id)
    f = open(name, 'wb')
    try:
        unique_add(self._new_contents, trans_id, 'file')
        f.writelines(contents)
    finally:
        f.close()
    self._set_mtime(name)
    self._set_mode(trans_id, mode_id, S_ISREG)
    # It is unfortunate we have to use lstat instead of fstat, but we just
    # used utime and chmod on the file, so we need the accurate final
    # details
    if sha1 is not None:
        self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1326
def _read_file_chunks(self, trans_id):
1327
cur_file = open(self._limbo_name(trans_id), 'rb')
1329
return cur_file.readlines()
1333
def _read_symlink_target(self, trans_id):
1334
return os.readlink(self._limbo_name(trans_id))
1336
def _set_mtime(self, path):
1337
"""All files that are created get the same mtime.
1339
This time is set by the first object to be created.
1341
if self._creation_mtime is None:
1342
self._creation_mtime = time.time()
1343
os.utime(path, (self._creation_mtime, self._creation_mtime))
1345
def create_hardlink(self, path, trans_id):
    """Schedule creation of a hard link.

    :param path: the existing file to link to.
    :param trans_id: TreeTransform handle for the new link.
    :raises errors.HardLinkNotSupported: when the OS refuses with EPERM.
    """
    name = self._limbo_name(trans_id)
    try:
        os.link(path, name)
    except OSError as e:
        if e.errno != errno.EPERM:
            raise
        raise errors.HardLinkNotSupported(path)
    try:
        unique_add(self._new_contents, trans_id, 'file')
    except:
        # Clean up the file, it never got registered so
        # TreeTransform.finalize() won't clean it up.
        os.unlink(name)
        raise
1362
def create_directory(self, trans_id):
    """Schedule creation of a new directory.

    See also new_directory.
    """
    os.mkdir(self._limbo_name(trans_id))
    unique_add(self._new_contents, trans_id, 'directory')
1370
def create_symlink(self, target, trans_id):
    """Schedule creation of a new symbolic link.

    target is a bytestring.
    See also new_symlink.

    NOTE(review): the platform-capability guard was reconstructed from a
    garbled extraction; verify against upstream history.
    """
    if has_symlinks():
        os.symlink(target, self._limbo_name(trans_id))
        unique_add(self._new_contents, trans_id, 'symlink')
    else:
        try:
            path = FinalPaths(self).get_path(trans_id)
        except KeyError:
            path = None
        raise UnableCreateSymlink(path=path)
1386
def cancel_creation(self, trans_id):
    """Cancel the creation of new file contents."""
    del self._new_contents[trans_id]
    if trans_id in self._observed_sha1s:
        del self._observed_sha1s[trans_id]
    children = self._limbo_children.get(trans_id)
    # if this is a limbo directory with children, move them before removing
    # the directory
    if children is not None:
        self._rename_in_limbo(children)
        del self._limbo_children[trans_id]
        del self._limbo_children_names[trans_id]
    delete_any(self._limbo_name(trans_id))
1400
def new_orphan(self, trans_id, parent_id):
    """See TreeTransformBase.new_orphan.

    Looks up the configured orphaning policy and delegates to it.
    """
    # FIXME: There is no tree config, so we use the branch one (it's weird
    # to define it this way as orphaning can only occur in a working tree,
    # but that's all we have (for now). It will find the option in
    # locations.conf or bazaar.conf though) -- vila 20100916
    conf = self._tree.branch.get_config()
    conf_var_name = 'bzr.transform.orphan_policy'
    orphan_policy = conf.get_user_option(conf_var_name)
    default_policy = orphaning_registry.default_key
    if orphan_policy is None:
        orphan_policy = default_policy
    if orphan_policy not in orphaning_registry:
        trace.warning('%s (from %s) is not a known policy, defaulting '
            'to %s' % (orphan_policy, conf_var_name, default_policy))
        orphan_policy = default_policy
    handle_orphan = orphaning_registry.get(orphan_policy)
    handle_orphan(self, trans_id, parent_id)
1419
class OrphaningError(errors.BzrError):
    """Base class for errors raised while orphaning a file."""

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
1431
class OrphaningForbidden(OrphaningError):
    """Raised when the active orphaning policy refuses to orphan."""

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1440
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.
    :param orphan_id: The trans id that should be orphaned.
    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1466
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1474
# Registry of available orphaning policies; 'conflict' is the default.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
1484
class TreeTransform(DiskTreeTransform):
1485
"""Represent a tree transformation.
1487
This object is designed to support incremental generation of the transform,
1490
However, it gives optimum performance when parent directories are created
1491
before their contents. The transform is then able to put child files
1492
directly in their parent directory, avoiding later renames.
1494
It is easy to produce malformed transforms, but they are generally
1495
harmless. Attempting to apply a malformed transform will cause an
1496
exception to be raised before any modifications are made to the tree.
1498
Many kinds of malformed transforms can be corrected with the
1499
resolve_conflicts function. The remaining ones indicate programming error,
1500
such as trying to create a file with no path.
1502
Two sets of file creation methods are supplied. Convenience methods are:
1507
These are composed of the low-level methods:
1509
* create_file or create_directory or create_symlink
1513
Transform/Transaction ids
1514
-------------------------
1515
trans_ids are temporary ids assigned to all files involved in a transform.
1516
It's possible, even common, that not all files in the Tree have trans_ids.
1518
trans_ids are used because filenames and file_ids are not good enough
1519
identifiers; filenames change, and not all files have file_ids. File-ids
1520
are also associated with trans-ids, so that moving a file moves its
1523
trans_ids are only valid for the TreeTransform that generated them.
1527
Limbo is a temporary directory use to hold new versions of files.
1528
Files are added to limbo by create_file, create_directory, create_symlink,
1529
and their convenience variants (new_*). Files may be removed from limbo
1530
using cancel_creation. Files are renamed from limbo into their final
1531
location as part of TreeTransform.apply
1533
Limbo must be cleaned up, by either calling TreeTransform.apply or
1534
calling TreeTransform.finalize.
1536
Files are placed into limbo inside their parent directories, where
1537
possible. This reduces subsequent renames, and makes operations involving
1538
lots of files faster. This optimization is only possible if the parent
1539
directory is created *before* creating any of its children, so avoid
1540
creating children before parents, where possible.
1544
This temporary directory is used by _FileMover for storing files that are
1545
about to be deleted. In case of rollback, the files will be restored.
1546
FileMover does not delete files until it is sure that a rollback will not
1549
def __init__(self, tree, pb=None):
    """Note: a tree_write lock is taken on the tree.

    Use TreeTransform.finalize() to release the lock (can be omitted if
    TreeTransform.apply() called).

    NOTE(review): the try/except scaffolding around the limbo and
    pending-deletion directory creation was reconstructed from a garbled
    extraction; verify against upstream history.
    """
    tree.lock_tree_write()
    try:
        limbodir = urlutils.local_path_from_url(
            tree._transport.abspath('limbo'))
        try:
            os.mkdir(limbodir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                raise ExistingLimbo(limbodir)
        deletiondir = urlutils.local_path_from_url(
            tree._transport.abspath('pending-deletion'))
        try:
            os.mkdir(deletiondir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                raise errors.ExistingPendingDeletion(deletiondir)
    except:
        tree.unlock()
        raise

    # Cache of realpath results, to speed up canonical_path
    self._realpaths = {}
    # Cache of relpath results, to speed up canonical_path
    self._relpaths = {}
    DiskTreeTransform.__init__(self, tree, limbodir, pb,
                               tree.case_sensitive)
    self._deletiondir = deletiondir
1584
def canonical_path(self, path):
    """Get the canonical tree-relative path."""
    # don't follow final symlinks
    abs = self._tree.abspath(path)
    if abs in self._relpaths:
        return self._relpaths[abs]
    dirname, basename = os.path.split(abs)
    if dirname not in self._realpaths:
        self._realpaths[dirname] = os.path.realpath(dirname)
    dirname = self._realpaths[dirname]
    abs = pathjoin(dirname, basename)
    if dirname in self._relpaths:
        relpath = pathjoin(self._relpaths[dirname], basename)
        relpath = relpath.rstrip('/\\')
    else:
        relpath = self._tree.relpath(abs)
    self._relpaths[abs] = relpath
    return relpath
1603
def tree_kind(self, trans_id):
    """Determine the file kind in the working tree.

    :returns: The file kind or None if the file does not exist
    """
    path = self._tree_id_paths.get(trans_id)
    if path is None:
        return None
    try:
        return file_kind(self._tree.abspath(path))
    except errors.NoSuchFile:
        return None
1616
def _set_mode(self, trans_id, mode_id, typefunc):
    """Set the mode of new file contents.

    The mode_id is the existing file to get the mode from (often the same
    as trans_id).  The operation is only performed if there's a mode match
    according to typefunc.
    """
    if mode_id is None:
        mode_id = trans_id
    try:
        old_path = self._tree_id_paths[mode_id]
    except KeyError:
        return
    try:
        mode = os.stat(self._tree.abspath(old_path)).st_mode
    except OSError as e:
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            # Either old_path doesn't exist, or the parent of the
            # target is not a directory (but will be one eventually)
            # Either way, we know it doesn't exist *right now*
            # See also bug #248448
            return
        else:
            raise
    if typefunc(mode):
        os.chmod(self._limbo_name(trans_id), mode)
1642
def iter_tree_children(self, parent_id):
    """Iterate through the entry's tree children, if any."""
    try:
        path = self._tree_id_paths[parent_id]
    except KeyError:
        return
    try:
        children = os.listdir(self._tree.abspath(path))
    except OSError as e:
        if not (osutils._is_error_enotdir(e)
                or e.errno in (errno.ENOENT, errno.ESRCH)):
            raise
        return

    for child in children:
        childpath = joinpath(path, child)
        # Control files (e.g. .bzr) are never tree children.
        if self._tree.is_control_filename(childpath):
            continue
        yield self.trans_id_tree_path(childpath)
1662
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the final path if possible.

    This optimizes the performance of applying the tree transform by
    avoiding renames.  These renames can be avoided only when the parent
    directory is already scheduled for creation.

    If the final path cannot be used, falls back to using the trans_id as
    the limbo path.

    NOTE(review): the for/else and fallthrough branches were reconstructed
    from a garbled extraction; verify against upstream history.
    """
    parent = self._new_parent.get(trans_id)
    # if the parent directory is already in limbo (e.g. when building a
    # tree), choose a limbo name inside the parent, to reduce further
    # renames.
    use_direct_path = False
    if self._new_contents.get(parent) == 'directory':
        filename = self._new_name.get(trans_id)
        if filename is not None:
            if parent not in self._limbo_children:
                self._limbo_children[parent] = set()
                self._limbo_children_names[parent] = {}
                use_direct_path = True
            # the direct path can only be used if no other file has
            # already taken this pathname, i.e. if the name is unused, or
            # if it is already associated with this trans_id.
            elif self._case_sensitive_target:
                if (self._limbo_children_names[parent].get(filename)
                    in (trans_id, None)):
                    use_direct_path = True
            else:
                for l_filename, l_trans_id in\
                    self._limbo_children_names[parent].iteritems():
                    if l_trans_id == trans_id:
                        continue
                    if l_filename.lower() == filename.lower():
                        break
                else:
                    use_direct_path = True

    if not use_direct_path:
        return DiskTreeTransform._generate_limbo_path(self, trans_id)

    limbo_name = pathjoin(self._limbo_files[parent], filename)
    self._limbo_children[parent].add(trans_id)
    self._limbo_children_names[parent][filename] = trans_id
    return limbo_name
1710
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1711
"""Apply all changes to the inventory and filesystem.
1713
If filesystem or inventory conflicts are present, MalformedTransform
1716
If apply succeeds, finalize is not necessary.
1718
:param no_conflicts: if True, the caller guarantees there are no
1719
conflicts, so no check is made.
1720
:param precomputed_delta: An inventory delta to use instead of
1722
:param _mover: Supply an alternate FileMover, for testing
1724
if not no_conflicts:
1725
self._check_malformed()
1726
child_pb = ui.ui_factory.nested_progress_bar()
1728
if precomputed_delta is None:
1729
child_pb.update('Apply phase', 0, 2)
1730
inventory_delta = self._generate_inventory_delta()
1733
inventory_delta = precomputed_delta
1736
mover = _FileMover()
1740
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1741
self._apply_removals(mover)
1742
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1743
modified_paths = self._apply_insertions(mover)
1748
mover.apply_deletions()
1751
if self.final_file_id(self.root) is None:
1752
inventory_delta = [e for e in inventory_delta if e[0] != '']
1753
self._tree.apply_inventory_delta(inventory_delta)
1754
self._apply_observed_sha1s()
1757
return _TransformResults(modified_paths, self.rename_count)
1759
def _generate_inventory_delta(self):
1760
"""Generate an inventory delta for the current transform."""
1761
inventory_delta = []
1762
child_pb = ui.ui_factory.nested_progress_bar()
1763
new_paths = self._inventory_altered()
1764
total_entries = len(new_paths) + len(self._removed_id)
1766
for num, trans_id in enumerate(self._removed_id):
1768
child_pb.update('removing file', num, total_entries)
1769
if trans_id == self._new_root:
1770
file_id = self._tree.get_root_id()
1772
file_id = self.tree_file_id(trans_id)
1773
# File-id isn't really being deleted, just moved
1774
if file_id in self._r_new_id:
1776
path = self._tree_id_paths[trans_id]
1777
inventory_delta.append((path, None, file_id, None))
1778
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1780
entries = self._tree.iter_entries_by_dir(
1781
new_path_file_ids.values())
1782
old_paths = dict((e.file_id, p) for p, e in entries)
1784
for num, (path, trans_id) in enumerate(new_paths):
1786
child_pb.update('adding file',
1787
num + len(self._removed_id), total_entries)
1788
file_id = new_path_file_ids[trans_id]
1792
kind = self.final_kind(trans_id)
1794
kind = self._tree.stored_kind(file_id)
1795
parent_trans_id = self.final_parent(trans_id)
1796
parent_file_id = new_path_file_ids.get(parent_trans_id)
1797
if parent_file_id is None:
1798
parent_file_id = self.final_file_id(parent_trans_id)
1799
if trans_id in self._new_reference_revision:
1800
new_entry = inventory.TreeReference(
1802
self._new_name[trans_id],
1803
self.final_file_id(self._new_parent[trans_id]),
1804
None, self._new_reference_revision[trans_id])
1806
new_entry = inventory.make_entry(kind,
1807
self.final_name(trans_id),
1808
parent_file_id, file_id)
1809
old_path = old_paths.get(new_entry.file_id)
1810
new_executability = self._new_executability.get(trans_id)
1811
if new_executability is not None:
1812
new_entry.executable = new_executability
1813
inventory_delta.append(
1814
(old_path, path, new_entry.file_id, new_entry))
1817
return inventory_delta
1819
def _apply_removals(self, mover):
1820
"""Perform tree operations that remove directory/inventory names.
1822
That is, delete files that are to be deleted, and put any files that
1823
need renaming into limbo. This must be done in strict child-to-parent
1826
If inventory_delta is None, no inventory delta generation is performed.
1828
tree_paths = list(self._tree_path_ids.iteritems())
1829
tree_paths.sort(reverse=True)
1830
child_pb = ui.ui_factory.nested_progress_bar()
1832
for num, (path, trans_id) in enumerate(tree_paths):
1833
# do not attempt to move root into a subdirectory of itself.
1836
child_pb.update('removing file', num, len(tree_paths))
1837
full_path = self._tree.abspath(path)
1838
if trans_id in self._removed_contents:
1839
delete_path = os.path.join(self._deletiondir, trans_id)
1840
mover.pre_delete(full_path, delete_path)
1841
elif (trans_id in self._new_name
1842
or trans_id in self._new_parent):
1844
mover.rename(full_path, self._limbo_name(trans_id))
1845
except errors.TransformRenameFailed, e:
1846
if e.errno != errno.ENOENT:
1849
self.rename_count += 1
1853
def _apply_insertions(self, mover):
1854
"""Perform tree operations that insert directory/inventory names.
1856
That is, create any files that need to be created, and restore from
1857
limbo any files that needed renaming. This must be done in strict
1858
parent-to-child order.
1860
If inventory_delta is None, no inventory delta is calculated, and
1861
no list of modified paths is returned.
1863
new_paths = self.new_paths(filesystem_only=True)
1865
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1867
child_pb = ui.ui_factory.nested_progress_bar()
1869
for num, (path, trans_id) in enumerate(new_paths):
1871
child_pb.update('adding file', num, len(new_paths))
1872
full_path = self._tree.abspath(path)
1873
if trans_id in self._needs_rename:
1875
mover.rename(self._limbo_name(trans_id), full_path)
1876
except errors.TransformRenameFailed, e:
1877
# We may be renaming a dangling inventory id
1878
if e.errno != errno.ENOENT:
1881
self.rename_count += 1
1882
# TODO: if trans_id in self._observed_sha1s, we should
1883
# re-stat the final target, since ctime will be
1884
# updated by the change.
1885
if (trans_id in self._new_contents or
1886
self.path_changed(trans_id)):
1887
if trans_id in self._new_contents:
1888
modified_paths.append(full_path)
1889
if trans_id in self._new_executability:
1890
self._set_executability(path, trans_id)
1891
if trans_id in self._observed_sha1s:
1892
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1893
st = osutils.lstat(full_path)
1894
self._observed_sha1s[trans_id] = (o_sha1, st)
1897
for path, trans_id in new_paths:
1898
# new_paths includes stuff like workingtree conflicts. Only the
1899
# stuff in new_contents actually comes from limbo.
1900
if trans_id in self._limbo_files:
1901
del self._limbo_files[trans_id]
1902
self._new_contents.clear()
1903
return modified_paths
1905
def _apply_observed_sha1s(self):
1906
"""After we have finished renaming everything, update observed sha1s
1908
This has to be done after self._tree.apply_inventory_delta, otherwise
1909
it doesn't know anything about the files we are updating. Also, we want
1910
to do this as late as possible, so that most entries end up cached.
1912
# TODO: this doesn't update the stat information for directories. So
1913
# the first 'bzr status' will still need to rewrite
1914
# .bzr/checkout/dirstate. However, we at least don't need to
1915
# re-read all of the files.
1916
# TODO: If the operation took a while, we could do a time.sleep(3) here
1917
# to allow the clock to tick over and ensure we won't have any
1918
# problems. (we could observe start time, and finish time, and if
1919
# it is less than eg 10% overhead, add a sleep call.)
1920
paths = FinalPaths(self)
1921
for trans_id, observed in self._observed_sha1s.iteritems():
1922
path = paths.get_path(trans_id)
1923
# We could get the file_id, but dirstate prefers to use the path
1924
# anyway, and it is 'cheaper' to determine.
1925
# file_id = self._new_id[trans_id]
1926
self._tree._observed_sha1(None, path, observed)
1929
class TransformPreview(DiskTreeTransform):
1930
"""A TreeTransform for generating preview trees.
1932
Unlike TreeTransform, this version works when the input tree is a
1933
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1934
unversioned files in the input tree.
1937
def __init__(self, tree, pb=None, case_sensitive=True):
1939
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1940
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1942
def canonical_path(self, path):
1945
def tree_kind(self, trans_id):
1946
path = self._tree_id_paths.get(trans_id)
1949
kind = self._tree.path_content_summary(path)[0]
1950
if kind == 'missing':
1954
def _set_mode(self, trans_id, mode_id, typefunc):
1955
"""Set the mode of new file contents.
1956
The mode_id is the existing file to get the mode from (often the same
1957
as trans_id). The operation is only performed if there's a mode match
1958
according to typefunc.
1960
# is it ok to ignore this? probably
1963
def iter_tree_children(self, parent_id):
1964
"""Iterate through the entry's tree children, if any"""
1966
path = self._tree_id_paths[parent_id]
1969
file_id = self.tree_file_id(parent_id)
1972
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1973
children = getattr(entry, 'children', {})
1974
for child in children:
1975
childpath = joinpath(path, child)
1976
yield self.trans_id_tree_path(childpath)
1978
def new_orphan(self, trans_id, parent_id):
1979
raise NotImplementedError(self.new_orphan)
1982
class _PreviewTree(tree.InventoryTree):
1983
"""Partial implementation of Tree to support show_diff_trees"""
1985
def __init__(self, transform):
1986
self._transform = transform
1987
self._final_paths = FinalPaths(transform)
1988
self.__by_parent = None
1989
self._parent_ids = []
1990
self._all_children_cache = {}
1991
self._path2trans_id_cache = {}
1992
self._final_name_cache = {}
1993
self._iter_changes_cache = dict((c[0], c) for c in
1994
self._transform.iter_changes())
1996
def _content_change(self, file_id):
1997
"""Return True if the content of this file changed"""
1998
changes = self._iter_changes_cache.get(file_id)
1999
# changes[2] is true if the file content changed. See
2000
# InterTree.iter_changes.
2001
return (changes is not None and changes[2])
2003
def _get_repository(self):
2004
repo = getattr(self._transform._tree, '_repository', None)
2006
repo = self._transform._tree.branch.repository
2009
def _iter_parent_trees(self):
2010
for revision_id in self.get_parent_ids():
2012
yield self.revision_tree(revision_id)
2013
except errors.NoSuchRevisionInTree:
2014
yield self._get_repository().revision_tree(revision_id)
2016
def _get_file_revision(self, file_id, vf, tree_revision):
2017
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
2018
self._iter_parent_trees()]
2019
vf.add_lines((file_id, tree_revision), parent_keys,
2020
self.get_file_lines(file_id))
2021
repo = self._get_repository()
2022
base_vf = repo.texts
2023
if base_vf not in vf.fallback_versionedfiles:
2024
vf.fallback_versionedfiles.append(base_vf)
2025
return tree_revision
2027
def _stat_limbo_file(self, file_id=None, trans_id=None):
2028
if trans_id is None:
2029
trans_id = self._transform.trans_id_file_id(file_id)
2030
name = self._transform._limbo_name(trans_id)
2031
return os.lstat(name)
2034
def _by_parent(self):
2035
if self.__by_parent is None:
2036
self.__by_parent = self._transform.by_parent()
2037
return self.__by_parent
2039
def _comparison_data(self, entry, path):
2040
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2041
if kind == 'missing':
2045
file_id = self._transform.final_file_id(self._path2trans_id(path))
2046
executable = self.is_executable(file_id, path)
2047
return kind, executable, None
2049
def is_locked(self):
2052
def lock_read(self):
2053
# Perhaps in theory, this should lock the TreeTransform?
2060
def inventory(self):
2061
"""This Tree does not use inventory as its backing data."""
2062
raise NotImplementedError(_PreviewTree.inventory)
2064
def get_root_id(self):
2065
return self._transform.final_file_id(self._transform.root)
2067
def all_file_ids(self):
2068
tree_ids = set(self._transform._tree.all_file_ids())
2069
tree_ids.difference_update(self._transform.tree_file_id(t)
2070
for t in self._transform._removed_id)
2071
tree_ids.update(self._transform._new_id.values())
2075
return iter(self.all_file_ids())
2077
def _has_id(self, file_id, fallback_check):
2078
if file_id in self._transform._r_new_id:
2080
elif file_id in set([self._transform.tree_file_id(trans_id) for
2081
trans_id in self._transform._removed_id]):
2084
return fallback_check(file_id)
2086
def has_id(self, file_id):
2087
return self._has_id(file_id, self._transform._tree.has_id)
2089
def has_or_had_id(self, file_id):
2090
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2092
def _path2trans_id(self, path):
2093
# We must not use None here, because that is a valid value to store.
2094
trans_id = self._path2trans_id_cache.get(path, object)
2095
if trans_id is not object:
2097
segments = splitpath(path)
2098
cur_parent = self._transform.root
2099
for cur_segment in segments:
2100
for child in self._all_children(cur_parent):
2101
final_name = self._final_name_cache.get(child)
2102
if final_name is None:
2103
final_name = self._transform.final_name(child)
2104
self._final_name_cache[child] = final_name
2105
if final_name == cur_segment:
2109
self._path2trans_id_cache[path] = None
2111
self._path2trans_id_cache[path] = cur_parent
2114
def path2id(self, path):
2115
return self._transform.final_file_id(self._path2trans_id(path))
2117
def id2path(self, file_id):
2118
trans_id = self._transform.trans_id_file_id(file_id)
2120
return self._final_paths._determine_path(trans_id)
2122
raise errors.NoSuchId(self, file_id)
2124
def _all_children(self, trans_id):
2125
children = self._all_children_cache.get(trans_id)
2126
if children is not None:
2128
children = set(self._transform.iter_tree_children(trans_id))
2129
# children in the _new_parent set are provided by _by_parent.
2130
children.difference_update(self._transform._new_parent.keys())
2131
children.update(self._by_parent.get(trans_id, []))
2132
self._all_children_cache[trans_id] = children
2135
def iter_children(self, file_id):
2136
trans_id = self._transform.trans_id_file_id(file_id)
2137
for child_trans_id in self._all_children(trans_id):
2138
yield self._transform.final_file_id(child_trans_id)
2141
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2142
in self._transform._tree.extras())
2143
possible_extras.update(self._transform._new_contents)
2144
possible_extras.update(self._transform._removed_id)
2145
for trans_id in possible_extras:
2146
if self._transform.final_file_id(trans_id) is None:
2147
yield self._final_paths._determine_path(trans_id)
2149
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2150
yield_parents=False):
2151
for trans_id, parent_file_id in ordered_entries:
2152
file_id = self._transform.final_file_id(trans_id)
2155
if (specific_file_ids is not None
2156
and file_id not in specific_file_ids):
2158
kind = self._transform.final_kind(trans_id)
2160
kind = self._transform._tree.stored_kind(file_id)
2161
new_entry = inventory.make_entry(
2163
self._transform.final_name(trans_id),
2164
parent_file_id, file_id)
2165
yield new_entry, trans_id
2167
def _list_files_by_dir(self):
2168
todo = [ROOT_PARENT]
2170
while len(todo) > 0:
2172
parent_file_id = self._transform.final_file_id(parent)
2173
children = list(self._all_children(parent))
2174
paths = dict(zip(children, self._final_paths.get_paths(children)))
2175
children.sort(key=paths.get)
2176
todo.extend(reversed(children))
2177
for trans_id in children:
2178
ordered_ids.append((trans_id, parent_file_id))
2181
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2182
# This may not be a maximally efficient implementation, but it is
2183
# reasonably straightforward. An implementation that grafts the
2184
# TreeTransform changes onto the tree's iter_entries_by_dir results
2185
# might be more efficient, but requires tricky inferences about stack
2187
ordered_ids = self._list_files_by_dir()
2188
for entry, trans_id in self._make_inv_entries(ordered_ids,
2189
specific_file_ids, yield_parents=yield_parents):
2190
yield unicode(self._final_paths.get_path(trans_id)), entry
2192
def _iter_entries_for_dir(self, dir_path):
2193
"""Return path, entry for items in a directory without recursing down."""
2194
dir_file_id = self.path2id(dir_path)
2196
for file_id in self.iter_children(dir_file_id):
2197
trans_id = self._transform.trans_id_file_id(file_id)
2198
ordered_ids.append((trans_id, file_id))
2199
for entry, trans_id in self._make_inv_entries(ordered_ids):
2200
yield unicode(self._final_paths.get_path(trans_id)), entry
2202
def list_files(self, include_root=False, from_dir=None, recursive=True):
2203
"""See WorkingTree.list_files."""
2204
# XXX This should behave like WorkingTree.list_files, but is really
2205
# more like RevisionTree.list_files.
2209
prefix = from_dir + '/'
2210
entries = self.iter_entries_by_dir()
2211
for path, entry in entries:
2212
if entry.name == '' and not include_root:
2215
if not path.startswith(prefix):
2217
path = path[len(prefix):]
2218
yield path, 'V', entry.kind, entry.file_id, entry
2220
if from_dir is None and include_root is True:
2221
root_entry = inventory.make_entry('directory', '',
2222
ROOT_PARENT, self.get_root_id())
2223
yield '', 'V', 'directory', root_entry.file_id, root_entry
2224
entries = self._iter_entries_for_dir(from_dir or '')
2225
for path, entry in entries:
2226
yield path, 'V', entry.kind, entry.file_id, entry
2228
def kind(self, file_id):
2229
trans_id = self._transform.trans_id_file_id(file_id)
2230
return self._transform.final_kind(trans_id)
2232
def stored_kind(self, file_id):
2233
trans_id = self._transform.trans_id_file_id(file_id)
2235
return self._transform._new_contents[trans_id]
2237
return self._transform._tree.stored_kind(file_id)
2239
def get_file_mtime(self, file_id, path=None):
2240
"""See Tree.get_file_mtime"""
2241
if not self._content_change(file_id):
2242
return self._transform._tree.get_file_mtime(file_id)
2243
return self._stat_limbo_file(file_id).st_mtime
2245
def _file_size(self, entry, stat_value):
2246
return self.get_file_size(entry.file_id)
2248
def get_file_size(self, file_id):
2249
"""See Tree.get_file_size"""
2250
trans_id = self._transform.trans_id_file_id(file_id)
2251
kind = self._transform.final_kind(trans_id)
2254
if trans_id in self._transform._new_contents:
2255
return self._stat_limbo_file(trans_id=trans_id).st_size
2256
if self.kind(file_id) == 'file':
2257
return self._transform._tree.get_file_size(file_id)
2261
def get_file_verifier(self, file_id, path=None, stat_value=None):
2262
trans_id = self._transform.trans_id_file_id(file_id)
2263
kind = self._transform._new_contents.get(trans_id)
2265
return self._transform._tree.get_file_verifier(file_id)
2267
fileobj = self.get_file(file_id)
2269
return ("SHA1", sha_file(fileobj))
2273
def get_file_sha1(self, file_id, path=None, stat_value=None):
2274
trans_id = self._transform.trans_id_file_id(file_id)
2275
kind = self._transform._new_contents.get(trans_id)
2277
return self._transform._tree.get_file_sha1(file_id)
2279
fileobj = self.get_file(file_id)
2281
return sha_file(fileobj)
2285
def is_executable(self, file_id, path=None):
2288
trans_id = self._transform.trans_id_file_id(file_id)
2290
return self._transform._new_executability[trans_id]
2293
return self._transform._tree.is_executable(file_id, path)
2295
if e.errno == errno.ENOENT:
2298
except errors.NoSuchId:
2301
def has_filename(self, path):
2302
trans_id = self._path2trans_id(path)
2303
if trans_id in self._transform._new_contents:
2305
elif trans_id in self._transform._removed_contents:
2308
return self._transform._tree.has_filename(path)
2310
def path_content_summary(self, path):
2311
trans_id = self._path2trans_id(path)
2312
tt = self._transform
2313
tree_path = tt._tree_id_paths.get(trans_id)
2314
kind = tt._new_contents.get(trans_id)
2316
if tree_path is None or trans_id in tt._removed_contents:
2317
return 'missing', None, None, None
2318
summary = tt._tree.path_content_summary(tree_path)
2319
kind, size, executable, link_or_sha1 = summary
2322
limbo_name = tt._limbo_name(trans_id)
2323
if trans_id in tt._new_reference_revision:
2324
kind = 'tree-reference'
2326
statval = os.lstat(limbo_name)
2327
size = statval.st_size
2328
if not supports_executable():
2331
executable = statval.st_mode & S_IEXEC
2335
if kind == 'symlink':
2336
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2337
executable = tt._new_executability.get(trans_id, executable)
2338
return kind, size, executable, link_or_sha1
2340
def iter_changes(self, from_tree, include_unchanged=False,
2341
specific_files=None, pb=None, extra_trees=None,
2342
require_versioned=True, want_unversioned=False):
2343
"""See InterTree.iter_changes.
2345
This has a fast path that is only used when the from_tree matches
2346
the transform tree, and no fancy options are supplied.
2348
if (from_tree is not self._transform._tree or include_unchanged or
2349
specific_files or want_unversioned):
2350
return tree.InterTree(from_tree, self).iter_changes(
2351
include_unchanged=include_unchanged,
2352
specific_files=specific_files,
2354
extra_trees=extra_trees,
2355
require_versioned=require_versioned,
2356
want_unversioned=want_unversioned)
2357
if want_unversioned:
2358
raise ValueError('want_unversioned is not supported')
2359
return self._transform.iter_changes()
2361
def get_file(self, file_id, path=None):
2362
"""See Tree.get_file"""
2363
if not self._content_change(file_id):
2364
return self._transform._tree.get_file(file_id, path)
2365
trans_id = self._transform.trans_id_file_id(file_id)
2366
name = self._transform._limbo_name(trans_id)
2367
return open(name, 'rb')
2369
def get_file_with_stat(self, file_id, path=None):
2370
return self.get_file(file_id, path), None
2372
def annotate_iter(self, file_id,
2373
default_revision=_mod_revision.CURRENT_REVISION):
2374
changes = self._iter_changes_cache.get(file_id)
2378
changed_content, versioned, kind = (changes[2], changes[3],
2382
get_old = (kind[0] == 'file' and versioned[0])
2384
old_annotation = self._transform._tree.annotate_iter(file_id,
2385
default_revision=default_revision)
2389
return old_annotation
2390
if not changed_content:
2391
return old_annotation
2392
# TODO: This is doing something similar to what WT.annotate_iter is
2393
# doing, however it fails slightly because it doesn't know what
2394
# the *other* revision_id is, so it doesn't know how to give the
2395
# other as the origin for some lines, they all get
2396
# 'default_revision'
2397
# It would be nice to be able to use the new Annotator based
2398
# approach, as well.
2399
return annotate.reannotate([old_annotation],
2400
self.get_file(file_id).readlines(),
2403
def get_symlink_target(self, file_id, path=None):
2404
"""See Tree.get_symlink_target"""
2405
if not self._content_change(file_id):
2406
return self._transform._tree.get_symlink_target(file_id)
2407
trans_id = self._transform.trans_id_file_id(file_id)
2408
name = self._transform._limbo_name(trans_id)
2409
return osutils.readlink(name)
2411
def walkdirs(self, prefix=''):
2412
pending = [self._transform.root]
2413
while len(pending) > 0:
2414
parent_id = pending.pop()
2417
prefix = prefix.rstrip('/')
2418
parent_path = self._final_paths.get_path(parent_id)
2419
parent_file_id = self._transform.final_file_id(parent_id)
2420
for child_id in self._all_children(parent_id):
2421
path_from_root = self._final_paths.get_path(child_id)
2422
basename = self._transform.final_name(child_id)
2423
file_id = self._transform.final_file_id(child_id)
2424
kind = self._transform.final_kind(child_id)
2425
if kind is not None:
2426
versioned_kind = kind
2429
versioned_kind = self._transform._tree.stored_kind(file_id)
2430
if versioned_kind == 'directory':
2431
subdirs.append(child_id)
2432
children.append((path_from_root, basename, kind, None,
2433
file_id, versioned_kind))
2435
if parent_path.startswith(prefix):
2436
yield (parent_path, parent_file_id), children
2437
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2440
def get_parent_ids(self):
2441
return self._parent_ids
2443
def set_parent_ids(self, parent_ids):
2444
self._parent_ids = parent_ids
2446
def get_revision_tree(self, revision_id):
2447
return self._transform._tree.get_revision_tree(revision_id)
864
2450
def joinpath(parent, child):
865
2451
"""Join tree-relative paths, handling the tree root specially"""
866
2452
if parent is None or parent == "":
896
2482
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2483
return self._known_paths[trans_id]
2485
def get_paths(self, trans_ids):
2486
return [(self.get_path(t), t) for t in trans_ids]
899
2490
def topology_sorted_ids(tree):
900
2491
"""Determine the topological order of the ids in a tree"""
901
2492
file_ids = list(tree)
902
2493
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2497
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2498
delta_from_tree=False):
2499
"""Create working tree for a branch, using a TreeTransform.
2501
This function should be used on empty trees, having a tree root at most.
2502
(see merge and revert functionality for working with existing trees)
2504
Existing files are handled like so:
2506
- Existing bzrdirs take precedence over creating new items. They are
2507
created as '%s.diverted' % name.
2508
- Otherwise, if the content on disk matches the content we are building,
2509
it is silently replaced.
2510
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2512
:param tree: The tree to convert wt into a copy of
2513
:param wt: The working tree that files will be placed into
2514
:param accelerator_tree: A tree which can be used for retrieving file
2515
contents more quickly than tree itself, i.e. a workingtree. tree
2516
will be used for cases where accelerator_tree's content is different.
2517
:param hardlink: If true, hard-link files to accelerator_tree, where
2518
possible. accelerator_tree must implement abspath, i.e. be a
2520
:param delta_from_tree: If true, build_tree may use the input Tree to
2521
generate the inventory delta.
2523
wt.lock_tree_write()
2527
if accelerator_tree is not None:
2528
accelerator_tree.lock_read()
2530
return _build_tree(tree, wt, accelerator_tree, hardlink,
2533
if accelerator_tree is not None:
2534
accelerator_tree.unlock()
2541
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2542
"""See build_tree."""
2543
for num, _unused in enumerate(wt.all_file_ids()):
2544
if num > 0: # more than just a root
2545
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2546
file_trans_id = {}
908
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2547
top_pb = ui.ui_factory.nested_progress_bar()
909
2548
pp = ProgressPhase("Build phase", 2, top_pb)
2549
if tree.inventory.root is not None:
2550
# This is kind of a hack: we should be altering the root
2551
# as part of the regular tree shape diff logic.
2552
# The conditional test here is to avoid doing an
2553
# expensive operation (flush) every time the root id
2554
# is set within the tree, nor setting the root and thus
2555
# marking the tree as dirty, because we use two different
2556
# idioms here: tree interfaces and inventory interfaces.
2557
if wt.get_root_id() != tree.get_root_id():
2558
wt.set_root_id(tree.get_root_id())
910
2560
tt = TreeTransform(wt)
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
915
pb = bzrlib.ui.ui_factory.nested_progress_bar()
2564
file_trans_id[wt.get_root_id()] = \
2565
tt.trans_id_tree_file_id(wt.get_root_id())
2566
pb = ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2568
deferred_contents = []
2570
total = len(tree.inventory)
2572
precomputed_delta = []
2574
precomputed_delta = None
2575
# Check if tree inventory has content. If so, we populate
2576
# existing_files with the directory content. If there are no
2577
# entries we skip populating existing_files as its not used.
2578
# This improves performance and unncessary work on large
2579
# directory trees. (#501307)
2581
existing_files = set()
2582
for dir, files in wt.walkdirs():
2583
existing_files.update(f[0] for f in files)
2584
for num, (tree_path, entry) in \
2585
enumerate(tree.inventory.iter_entries_by_dir()):
2586
pb.update("Building tree", num - len(deferred_contents), total)
920
2587
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2590
file_id = entry.file_id
2592
precomputed_delta.append((None, tree_path, file_id, entry))
2593
if tree_path in existing_files:
2594
target_path = wt.abspath(tree_path)
2595
kind = file_kind(target_path)
2596
if kind == "directory":
2598
bzrdir.BzrDir.open(target_path)
2599
except errors.NotBranchError:
2603
if (file_id not in divert and
2604
_content_match(tree, entry, file_id, kind,
2606
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2607
if kind == 'directory':
924
2609
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2610
if entry.kind == 'file':
2611
# We *almost* replicate new_by_entry, so that we can defer
2612
# getting the file text, and get them all at once.
2613
trans_id = tt.create_path(entry.name, parent_id)
2614
file_trans_id[file_id] = trans_id
2615
tt.version_file(file_id, trans_id)
2616
executable = tree.is_executable(file_id, tree_path)
2618
tt.set_executability(executable, trans_id)
2619
trans_data = (trans_id, tree_path, entry.text_sha1)
2620
deferred_contents.append((file_id, trans_data))
2622
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2625
new_trans_id = file_trans_id[file_id]
2626
old_parent = tt.trans_id_tree_path(tree_path)
2627
_reparent_children(tt, old_parent, new_trans_id)
2628
offset = num + 1 - len(deferred_contents)
2629
_create_files(tt, tree, deferred_contents, pb, offset,
2630
accelerator_tree, hardlink)
2634
divert_trans = set(file_trans_id[f] for f in divert)
2635
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2636
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2637
if len(raw_conflicts) > 0:
2638
precomputed_delta = None
2639
conflicts = cook_conflicts(raw_conflicts, tt)
2640
for conflict in conflicts:
2641
trace.warning(unicode(conflict))
2643
wt.add_conflicts(conflicts)
2644
except errors.UnsupportedOperation:
2646
result = tt.apply(no_conflicts=True,
2647
precomputed_delta=precomputed_delta)
933
2650
top_pb.finished()
2654
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2656
total = len(desired_files) + offset
2658
if accelerator_tree is None:
2659
new_desired_files = desired_files
2661
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2662
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2663
in iter if not (c or e[0] != e[1])]
2664
if accelerator_tree.supports_content_filtering():
2665
unchanged = [(f, p) for (f, p) in unchanged
2666
if not accelerator_tree.iter_search_rules([p]).next()]
2667
unchanged = dict(unchanged)
2668
new_desired_files = []
2670
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2671
accelerator_path = unchanged.get(file_id)
2672
if accelerator_path is None:
2673
new_desired_files.append((file_id,
2674
(trans_id, tree_path, text_sha1)))
2676
pb.update('Adding file contents', count + offset, total)
2678
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2681
contents = accelerator_tree.get_file(file_id, accelerator_path)
2682
if wt.supports_content_filtering():
2683
filters = wt._content_filter_stack(tree_path)
2684
contents = filtered_output_bytes(contents, filters,
2685
ContentFilterContext(tree_path, tree))
2687
tt.create_file(contents, trans_id, sha1=text_sha1)
2691
except AttributeError:
2692
# after filtering, contents may no longer be file-like
2696
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2697
tree.iter_files_bytes(new_desired_files)):
2698
if wt.supports_content_filtering():
2699
filters = wt._content_filter_stack(tree_path)
2700
contents = filtered_output_bytes(contents, filters,
2701
ContentFilterContext(tree_path, tree))
2702
tt.create_file(contents, trans_id, sha1=text_sha1)
2703
pb.update('Adding file contents', count + offset, total)
2706
def _reparent_children(tt, old_parent, new_parent):
2707
for child in tt.iter_tree_children(old_parent):
2708
tt.adjust_path(tt.final_name(child), new_parent, child)
2711
def _reparent_transform_children(tt, old_parent, new_parent):
2712
by_parent = tt.by_parent()
2713
for child in by_parent[old_parent]:
2714
tt.adjust_path(tt.final_name(child), new_parent, child)
2715
return by_parent[old_parent]
2718
def _content_match(tree, entry, file_id, kind, target_path):
2719
if entry.kind != kind:
2721
if entry.kind == "directory":
2723
if entry.kind == "file":
2724
f = file(target_path, 'rb')
2726
if tree.get_file_text(file_id) == f.read():
2730
elif entry.kind == "symlink":
2731
if tree.get_symlink_target(file_id) == os.readlink(target_path):
2736
def resolve_checkout(tt, conflicts, divert):
2737
new_conflicts = set()
2738
for c_type, conflict in ((c[0], c) for c in conflicts):
2739
# Anything but a 'duplicate' would indicate programmer error
2740
if c_type != 'duplicate':
2741
raise AssertionError(c_type)
2742
# Now figure out which is new and which is old
2743
if tt.new_contents(conflict[1]):
2744
new_file = conflict[1]
2745
old_file = conflict[2]
2747
new_file = conflict[2]
2748
old_file = conflict[1]
2750
# We should only get here if the conflict wasn't completely
2752
final_parent = tt.final_parent(old_file)
2753
if new_file in divert:
2754
new_name = tt.final_name(old_file)+'.diverted'
2755
tt.adjust_path(new_name, final_parent, new_file)
2756
new_conflicts.add((c_type, 'Diverted to',
2757
new_file, old_file))
2759
new_name = tt.final_name(old_file)+'.moved'
2760
tt.adjust_path(new_name, final_parent, old_file)
2761
new_conflicts.add((c_type, 'Moved existing file to',
2762
old_file, new_file))
2763
return new_conflicts
935
2766
def new_by_entry(tt, entry, parent_id, tree):
936
2767
"""Create a new file according to its inventory entry"""
1046
2851
working_kind = working_tree.kind(file_id)
1047
2852
has_contents = True
1049
if e.errno != errno.ENOENT:
1051
2854
has_contents = False
1052
2855
contents_mod = True
1053
2856
meta_mod = False
1054
2857
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
2858
if entry.kind != working_kind:
1059
2859
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
2861
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
2863
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
2864
cur_entry._forget_tree_state()
1065
2865
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2868
def revert(working_tree, target_tree, filenames, backups=False,
2869
pb=None, change_reporter=None):
1070
2870
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
2871
target_tree.lock_read()
2872
pb = ui.ui_factory.nested_progress_bar()
1075
2873
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2875
pp = ProgressPhase("Revert phase", 3, pb)
2876
conflicts, merge_modified = _prepare_revert_transform(
2877
working_tree, target_tree, tt, filenames, backups, pp)
2879
change_reporter = delta._ChangeReporter(
2880
unversioned_filter=working_tree.is_ignored)
2881
delta.report_changes(tt.iter_changes(), change_reporter)
2882
for conflict in conflicts:
2883
trace.warning(unicode(conflict))
2886
working_tree.set_merge_modified(merge_modified)
2888
target_tree.unlock()
2894
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2895
backups, pp, basis_tree=None,
2896
merge_modified=None):
2897
child_pb = ui.ui_factory.nested_progress_bar()
2899
if merge_modified is None:
2900
merge_modified = working_tree.merge_modified()
2901
merge_modified = _alter_files(working_tree, target_tree, tt,
2902
child_pb, filenames, backups,
2903
merge_modified, basis_tree)
2906
child_pb = ui.ui_factory.nested_progress_bar()
2908
raw_conflicts = resolve_conflicts(tt, child_pb,
2909
lambda t, c: conflict_pass(t, c, target_tree))
2912
conflicts = cook_conflicts(raw_conflicts, tt)
2913
return conflicts, merge_modified
2916
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2917
backups, merge_modified, basis_tree=None):
2918
if basis_tree is not None:
2919
basis_tree.lock_read()
2920
# We ask the working_tree for its changes relative to the target, rather
2921
# than the target changes relative to the working tree. Because WT4 has an
2922
# optimizer to compare itself to a target, but no optimizer for the
2924
change_list = working_tree.iter_changes(target_tree,
2925
specific_files=specific_files, pb=pb)
2926
if target_tree.get_root_id() is None:
2932
for id_num, (file_id, path, changed_content, versioned, parent, name,
2933
kind, executable) in enumerate(change_list):
2934
target_path, wt_path = path
2935
target_versioned, wt_versioned = versioned
2936
target_parent, wt_parent = parent
2937
target_name, wt_name = name
2938
target_kind, wt_kind = kind
2939
target_executable, wt_executable = executable
2940
if skip_root and wt_parent is None:
2942
trans_id = tt.trans_id_file_id(file_id)
2945
keep_content = False
2946
if wt_kind == 'file' and (backups or target_kind is None):
2947
wt_sha1 = working_tree.get_file_sha1(file_id)
2948
if merge_modified.get(file_id) != wt_sha1:
2949
# acquire the basis tree lazily to prevent the
2950
# expense of accessing it when it's not needed ?
2951
# (Guessing, RBC, 200702)
2952
if basis_tree is None:
2953
basis_tree = working_tree.basis_tree()
2954
basis_tree.lock_read()
2955
if basis_tree.has_id(file_id):
2956
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2958
elif target_kind is None and not target_versioned:
2960
if wt_kind is not None:
2961
if not keep_content:
1121
2962
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2963
elif target_kind is not None:
2964
parent_trans_id = tt.trans_id_file_id(wt_parent)
2965
backup_name = tt._available_backup_name(
2966
wt_name, parent_trans_id)
2967
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2968
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2969
if wt_versioned and target_versioned:
2970
tt.unversion_file(trans_id)
2971
tt.version_file(file_id, new_trans_id)
2972
# New contents should have the same unix perms as old
2975
trans_id = new_trans_id
2976
if target_kind in ('directory', 'tree-reference'):
2977
tt.create_directory(trans_id)
2978
if target_kind == 'tree-reference':
2979
revision = target_tree.get_reference_revision(file_id,
2981
tt.set_tree_reference(revision, trans_id)
2982
elif target_kind == 'symlink':
2983
tt.create_symlink(target_tree.get_symlink_target(file_id),
2985
elif target_kind == 'file':
2986
deferred_files.append((file_id, (trans_id, mode_id)))
2987
if basis_tree is None:
2988
basis_tree = working_tree.basis_tree()
2989
basis_tree.lock_read()
2990
new_sha1 = target_tree.get_file_sha1(file_id)
2991
if (basis_tree.has_id(file_id) and
2992
new_sha1 == basis_tree.get_file_sha1(file_id)):
2993
if file_id in merge_modified:
2994
del merge_modified[file_id]
2996
merge_modified[file_id] = new_sha1
2998
# preserve the execute bit when backing up
2999
if keep_content and wt_executable == target_executable:
3000
tt.set_executability(target_executable, trans_id)
3001
elif target_kind is not None:
3002
raise AssertionError(target_kind)
3003
if not wt_versioned and target_versioned:
3004
tt.version_file(file_id, trans_id)
3005
if wt_versioned and not target_versioned:
3006
tt.unversion_file(trans_id)
3007
if (target_name is not None and
3008
(wt_name != target_name or wt_parent != target_parent)):
3009
if target_name == '' and target_parent is None:
3010
parent_trans = ROOT_PARENT
3012
parent_trans = tt.trans_id_file_id(target_parent)
3013
if wt_parent is None and wt_versioned:
3014
tt.adjust_root_path(target_name, parent_trans)
3016
tt.adjust_path(target_name, parent_trans, trans_id)
3017
if wt_executable != target_executable and target_kind == "file":
3018
tt.set_executability(target_executable, trans_id)
3019
if working_tree.supports_content_filtering():
3020
for index, ((trans_id, mode_id), bytes) in enumerate(
3021
target_tree.iter_files_bytes(deferred_files)):
3022
file_id = deferred_files[index][0]
3023
# We're reverting a tree to the target tree so using the
3024
# target tree to find the file path seems the best choice
3025
# here IMO - Ian C 27/Oct/2009
3026
filter_tree_path = target_tree.id2path(file_id)
3027
filters = working_tree._content_filter_stack(filter_tree_path)
3028
bytes = filtered_output_bytes(bytes, filters,
3029
ContentFilterContext(filter_tree_path, working_tree))
3030
tt.create_file(bytes, trans_id, mode_id)
3032
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
3034
tt.create_file(bytes, trans_id, mode_id)
3035
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
3037
if basis_tree is not None:
3039
return merge_modified
3042
def resolve_conflicts(tt, pb=None, pass_func=None):
1144
3043
"""Make many conflict-resolution attempts, but die if they fail"""
3044
if pass_func is None:
3045
pass_func = conflict_pass
1145
3046
new_conflicts = set()
3047
pb = ui.ui_factory.nested_progress_bar()
1147
3049
for n in range(10):
1148
3050
pb.update('Resolution pass', n+1, 10)
1149
3051
conflicts = tt.find_conflicts()
1150
3052
if len(conflicts) == 0:
1151
3053
return new_conflicts
1152
new_conflicts.update(conflict_pass(tt, conflicts))
3054
new_conflicts.update(pass_func(tt, conflicts))
1153
3055
raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
3060
def conflict_pass(tt, conflicts, path_tree=None):
3061
"""Resolve some classes of conflicts.
3063
:param tt: The transform to resolve conflicts in
3064
:param conflicts: The conflicts to resolve
3065
:param path_tree: A Tree to get supplemental paths from
1160
3067
new_conflicts = set()
1161
3068
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
3069
if c_type == 'duplicate id':