861
837
self.create_symlink(target, trans_id)
840
def new_orphan(self, trans_id, parent_id):
    """Schedule an item to be orphaned.

    When a directory is about to be removed, its children, if they are not
    versioned are moved out of the way: they don't have a parent anymore.

    :param trans_id: The trans_id of the existing item.
    :param parent_id: The parent trans_id of the item.
    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError(self.new_orphan)
851
def _get_potential_orphans(self, dir_id):
852
"""Find the potential orphans in a directory.
854
A directory can't be safely deleted if there are versioned files in it.
855
If all the contained files are unversioned then they can be orphaned.
857
The 'None' return value means that the directory contains at least one
858
versioned file and should not be deleted.
860
:param dir_id: The directory trans id.
862
:return: A list of the orphan trans ids or None if at least one
863
versioned file is present.
866
# Find the potential orphans, stop if one item should be kept
867
for child_tid in self.by_parent()[dir_id]:
868
if child_tid in self._removed_contents:
869
# The child is removed as part of the transform. Since it was
870
# versioned before, it's not an orphan
872
elif self.final_file_id(child_tid) is None:
873
# The child is not versioned
874
orphans.append(child_tid)
876
# We have a versioned file here, searching for orphans is
882
def _affected_ids(self):
883
"""Return the set of transform ids affected by the transform"""
884
trans_ids = set(self._removed_id)
885
trans_ids.update(self._new_id.keys())
886
trans_ids.update(self._removed_contents)
887
trans_ids.update(self._new_contents.keys())
888
trans_ids.update(self._new_executability.keys())
889
trans_ids.update(self._new_name.keys())
890
trans_ids.update(self._new_parent.keys())
893
def _get_file_id_maps(self):
894
"""Return mapping of file_ids to trans_ids in the to and from states"""
895
trans_ids = self._affected_ids()
898
# Build up two dicts: trans_ids associated with file ids in the
899
# FROM state, vs the TO state.
900
for trans_id in trans_ids:
901
from_file_id = self.tree_file_id(trans_id)
902
if from_file_id is not None:
903
from_trans_ids[from_file_id] = trans_id
904
to_file_id = self.final_file_id(trans_id)
905
if to_file_id is not None:
906
to_trans_ids[to_file_id] = trans_id
907
return from_trans_ids, to_trans_ids
909
def _from_file_data(self, from_trans_id, from_versioned, file_id):
910
"""Get data about a file in the from (tree) state
912
Return a (name, parent, kind, executable) tuple
914
from_path = self._tree_id_paths.get(from_trans_id)
916
# get data from working tree if versioned
917
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
918
from_name = from_entry.name
919
from_parent = from_entry.parent_id
922
if from_path is None:
923
# File does not exist in FROM state
927
# File exists, but is not versioned. Have to use path-
929
from_name = os.path.basename(from_path)
930
tree_parent = self.get_tree_parent(from_trans_id)
931
from_parent = self.tree_file_id(tree_parent)
932
if from_path is not None:
933
from_kind, from_executable, from_stats = \
934
self._tree._comparison_data(from_entry, from_path)
937
from_executable = False
938
return from_name, from_parent, from_kind, from_executable
940
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
941
"""Get data about a file in the to (target) state
943
Return a (name, parent, kind, executable) tuple
945
to_name = self.final_name(to_trans_id)
946
to_kind = self.final_kind(to_trans_id)
947
to_parent = self.final_file_id(self.final_parent(to_trans_id))
948
if to_trans_id in self._new_executability:
949
to_executable = self._new_executability[to_trans_id]
950
elif to_trans_id == from_trans_id:
951
to_executable = from_executable
953
to_executable = False
954
return to_name, to_parent, to_kind, to_executable
956
def iter_changes(self):
957
"""Produce output in the same format as Tree.iter_changes.
959
Will produce nonsensical results if invoked while inventory/filesystem
960
conflicts (as reported by TreeTransform.find_conflicts()) are present.
962
This reads the Transform, but only reproduces changes involving a
963
file_id. Files that are not versioned in either of the FROM or TO
964
states are not reflected.
966
final_paths = FinalPaths(self)
967
from_trans_ids, to_trans_ids = self._get_file_id_maps()
969
# Now iterate through all active file_ids
970
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
972
from_trans_id = from_trans_ids.get(file_id)
973
# find file ids, and determine versioning state
974
if from_trans_id is None:
975
from_versioned = False
976
from_trans_id = to_trans_ids[file_id]
978
from_versioned = True
979
to_trans_id = to_trans_ids.get(file_id)
980
if to_trans_id is None:
982
to_trans_id = from_trans_id
986
from_name, from_parent, from_kind, from_executable = \
987
self._from_file_data(from_trans_id, from_versioned, file_id)
989
to_name, to_parent, to_kind, to_executable = \
990
self._to_file_data(to_trans_id, from_trans_id, from_executable)
992
if not from_versioned:
995
from_path = self._tree_id_paths.get(from_trans_id)
999
to_path = final_paths.get_path(to_trans_id)
1000
if from_kind != to_kind:
1002
elif to_kind in ('file', 'symlink') and (
1003
to_trans_id != from_trans_id or
1004
to_trans_id in self._new_contents):
1006
if (not modified and from_versioned == to_versioned and
1007
from_parent==to_parent and from_name == to_name and
1008
from_executable == to_executable):
1010
results.append((file_id, (from_path, to_path), modified,
1011
(from_versioned, to_versioned),
1012
(from_parent, to_parent),
1013
(from_name, to_name),
1014
(from_kind, to_kind),
1015
(from_executable, to_executable)))
1016
return iter(sorted(results, key=lambda x:x[1]))
1018
def get_preview_tree(self):
    """Return a tree representing the result of the transform.

    The tree is a snapshot, and altering the TreeTransform will invalidate
    it.
    """
    return _PreviewTree(self)
1026
def commit(self, branch, message, merge_parents=None, strict=False,
1027
timestamp=None, timezone=None, committer=None, authors=None,
1028
revprops=None, revision_id=None):
1029
"""Commit the result of this TreeTransform to a branch.
1031
:param branch: The branch to commit to.
1032
:param message: The message to attach to the commit.
1033
:param merge_parents: Additional parent revision-ids specified by
1035
:param strict: If True, abort the commit if there are unversioned
1037
:param timestamp: if not None, seconds-since-epoch for the time and
1038
date. (May be a float.)
1039
:param timezone: Optional timezone for timestamp, as an offset in
1041
:param committer: Optional committer in email-id format.
1042
(e.g. "J Random Hacker <jrandom@example.com>")
1043
:param authors: Optional list of authors in email-id format.
1044
:param revprops: Optional dictionary of revision properties.
1045
:param revision_id: Optional revision id. (Specifying a revision-id
1046
may reduce performance for some non-native formats.)
1047
:return: The revision_id of the revision committed.
1049
self._check_malformed()
1051
unversioned = set(self._new_contents).difference(set(self._new_id))
1052
for trans_id in unversioned:
1053
if self.final_file_id(trans_id) is None:
1054
raise errors.StrictCommitFailed()
1056
revno, last_rev_id = branch.last_revision_info()
1057
if last_rev_id == _mod_revision.NULL_REVISION:
1058
if merge_parents is not None:
1059
raise ValueError('Cannot supply merge parents for first'
1063
parent_ids = [last_rev_id]
1064
if merge_parents is not None:
1065
parent_ids.extend(merge_parents)
1066
if self._tree.get_revision_id() != last_rev_id:
1067
raise ValueError('TreeTransform not based on branch basis: %s' %
1068
self._tree.get_revision_id())
1069
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1070
builder = branch.get_commit_builder(parent_ids,
1071
timestamp=timestamp,
1073
committer=committer,
1075
revision_id=revision_id)
1076
preview = self.get_preview_tree()
1077
list(builder.record_iter_changes(preview, last_rev_id,
1078
self.iter_changes()))
1079
builder.finish_inventory()
1080
revision_id = builder.commit(message)
1081
branch.set_last_revision_info(revno + 1, revision_id)
1084
def _text_parent(self, trans_id):
1085
file_id = self.tree_file_id(trans_id)
1087
if file_id is None or self._tree.kind(file_id) != 'file':
1089
except errors.NoSuchFile:
1093
def _get_parents_texts(self, trans_id):
1094
"""Get texts for compression parents of this file."""
1095
file_id = self._text_parent(trans_id)
1098
return (self._tree.get_file_text(file_id),)
1100
def _get_parents_lines(self, trans_id):
1101
"""Get lines for compression parents of this file."""
1102
file_id = self._text_parent(trans_id)
1105
return (self._tree.get_file_lines(file_id),)
1107
def serialize(self, serializer):
1108
"""Serialize this TreeTransform.
1110
:param serializer: A Serialiser like pack.ContainerSerializer.
1112
new_name = dict((k, v.encode('utf-8')) for k, v in
1113
self._new_name.items())
1114
new_executability = dict((k, int(v)) for k, v in
1115
self._new_executability.items())
1116
tree_path_ids = dict((k.encode('utf-8'), v)
1117
for k, v in self._tree_path_ids.items())
1119
'_id_number': self._id_number,
1120
'_new_name': new_name,
1121
'_new_parent': self._new_parent,
1122
'_new_executability': new_executability,
1123
'_new_id': self._new_id,
1124
'_tree_path_ids': tree_path_ids,
1125
'_removed_id': list(self._removed_id),
1126
'_removed_contents': list(self._removed_contents),
1127
'_non_present_ids': self._non_present_ids,
1129
yield serializer.bytes_record(bencode.bencode(attribs),
1131
for trans_id, kind in self._new_contents.items():
1133
lines = osutils.chunks_to_lines(
1134
self._read_file_chunks(trans_id))
1135
parents = self._get_parents_lines(trans_id)
1136
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1137
content = ''.join(mpdiff.to_patch())
1138
if kind == 'directory':
1140
if kind == 'symlink':
1141
content = self._read_symlink_target(trans_id)
1142
yield serializer.bytes_record(content, ((trans_id, kind),))
1144
def deserialize(self, records):
1145
"""Deserialize a stored TreeTransform.
1147
:param records: An iterable of (names, content) tuples, as per
1148
pack.ContainerPushParser.
1150
names, content = records.next()
1151
attribs = bencode.bdecode(content)
1152
self._id_number = attribs['_id_number']
1153
self._new_name = dict((k, v.decode('utf-8'))
1154
for k, v in attribs['_new_name'].items())
1155
self._new_parent = attribs['_new_parent']
1156
self._new_executability = dict((k, bool(v)) for k, v in
1157
attribs['_new_executability'].items())
1158
self._new_id = attribs['_new_id']
1159
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1160
self._tree_path_ids = {}
1161
self._tree_id_paths = {}
1162
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1163
path = bytepath.decode('utf-8')
1164
self._tree_path_ids[path] = trans_id
1165
self._tree_id_paths[trans_id] = path
1166
self._removed_id = set(attribs['_removed_id'])
1167
self._removed_contents = set(attribs['_removed_contents'])
1168
self._non_present_ids = attribs['_non_present_ids']
1169
for ((trans_id, kind),), content in records:
1171
mpdiff = multiparent.MultiParent.from_patch(content)
1172
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1173
self.create_file(lines, trans_id)
1174
if kind == 'directory':
1175
self.create_directory(trans_id)
1176
if kind == 'symlink':
1177
self.create_symlink(content.decode('utf-8'), trans_id)
1180
class DiskTreeTransform(TreeTransformBase):
1181
"""Tree transform storing its contents on disk."""
1183
def __init__(self, tree, limbodir, pb=None,
             case_sensitive=True):
    """Constructor.

    :param tree: The tree that will be transformed, but not necessarily
        the output tree.
    :param limbodir: A directory where new files can be stored until
        they are installed in their proper places
    :param pb: ignored  # NOTE(review): passed through to the base class;
        presumably unused there — confirm
    :param case_sensitive: If True, the target of the transform is
        case sensitive, not just case preserving.
    """
    TreeTransformBase.__init__(self, tree, pb, case_sensitive)
    self._limbodir = limbodir
    self._deletiondir = None
    # A mapping of transform ids to their limbo filename
    self._limbo_files = {}
    self._possibly_stale_limbo_files = set()
    # A mapping of transform ids to a set of the transform ids of children
    # that their limbo directory has
    self._limbo_children = {}
    # Map transform ids to maps of child filename to child transform id
    self._limbo_children_names = {}
    # List of transform ids that need to be renamed from limbo into place
    self._needs_rename = set()
    self._creation_mtime = None
1210
"""Release the working tree lock, if held, clean up limbo dir.
1212
This is required if apply has not been invoked, but can be invoked
1215
if self._tree is None:
1218
limbo_paths = self._limbo_files.values() + list(
1219
self._possibly_stale_limbo_files)
1220
limbo_paths = sorted(limbo_paths, reverse=True)
1221
for path in limbo_paths:
1225
if e.errno != errno.ENOENT:
1227
# XXX: warn? perhaps we just got interrupted at an
1228
# inconvenient moment, but perhaps files are disappearing
1231
delete_any(self._limbodir)
1233
# We don't especially care *why* the dir is immortal.
1234
raise ImmortalLimbo(self._limbodir)
1236
if self._deletiondir is not None:
1237
delete_any(self._deletiondir)
1239
raise errors.ImmortalPendingDeletion(self._deletiondir)
1241
TreeTransformBase.finalize(self)
1243
def _limbo_name(self, trans_id):
1244
"""Generate the limbo name of a file"""
1245
limbo_name = self._limbo_files.get(trans_id)
1246
if limbo_name is None:
1247
limbo_name = self._generate_limbo_path(trans_id)
1248
self._limbo_files[trans_id] = limbo_name
1251
def _generate_limbo_path(self, trans_id):
    """Generate a limbo path using the trans_id as the relative path.

    This is suitable as a fallback, and when the transform should not be
    sensitive to the path encoding of the limbo directory.
    """
    # Files placed at a trans_id-based path cannot be created in their
    # final location, so they will need a rename at apply time.
    self._needs_rename.add(trans_id)
    return pathjoin(self._limbodir, trans_id)
1260
def adjust_path(self, name, parent, trans_id):
1261
previous_parent = self._new_parent.get(trans_id)
1262
previous_name = self._new_name.get(trans_id)
1263
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1264
if (trans_id in self._limbo_files and
1265
trans_id not in self._needs_rename):
1266
self._rename_in_limbo([trans_id])
1267
if previous_parent != parent:
1268
self._limbo_children[previous_parent].remove(trans_id)
1269
if previous_parent != parent or previous_name != name:
1270
del self._limbo_children_names[previous_parent][previous_name]
1272
def _rename_in_limbo(self, trans_ids):
1273
"""Fix limbo names so that the right final path is produced.
1275
This means we outsmarted ourselves-- we tried to avoid renaming
1276
these files later by creating them with their final names in their
1277
final parents. But now the previous name or parent is no longer
1278
suitable, so we have to rename them.
1280
Even for trans_ids that have no new contents, we must remove their
1281
entries from _limbo_files, because they are now stale.
1283
for trans_id in trans_ids:
1284
old_path = self._limbo_files[trans_id]
1285
self._possibly_stale_limbo_files.add(old_path)
1286
del self._limbo_files[trans_id]
1287
if trans_id not in self._new_contents:
1289
new_path = self._limbo_name(trans_id)
1290
os.rename(old_path, new_path)
1291
self._possibly_stale_limbo_files.remove(old_path)
1292
for descendant in self._limbo_descendants(trans_id):
1293
desc_path = self._limbo_files[descendant]
1294
desc_path = new_path + desc_path[len(old_path):]
1295
self._limbo_files[descendant] = desc_path
1297
def _limbo_descendants(self, trans_id):
1298
"""Return the set of trans_ids whose limbo paths descend from this."""
1299
descendants = set(self._limbo_children.get(trans_id, []))
1300
for descendant in list(descendants):
1301
descendants.update(self._limbo_descendants(descendant))
1304
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
    """Schedule creation of a new file.

    :param contents: an iterator of strings, all of which will be written
        to the target destination.
    :param trans_id: TreeTransform handle
    :param mode_id: If not None, force the mode of the target file to match
        the mode of the object referenced by mode_id.
        Otherwise, we will try to preserve mode bits of an existing file.
    :param sha1: If the sha1 of this content is already known, pass it in.
        We can use it to prevent future sha1 computations.
    """
    name = self._limbo_name(trans_id)
    f = open(name, 'wb')
    try:
        unique_add(self._new_contents, trans_id, 'file')
        f.writelines(contents)
    finally:
        f.close()
    self._set_mtime(name)
    self._set_mode(trans_id, mode_id, S_ISREG)
    # It is unfortunate we have to use lstat instead of fstat, but we just
    # used utime and chmod on the file, so we need the accurate final
    # details.
    if sha1 is not None:
        self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1333
def _read_file_chunks(self, trans_id):
1334
cur_file = open(self._limbo_name(trans_id), 'rb')
1336
return cur_file.readlines()
1340
def _read_symlink_target(self, trans_id):
1341
return os.readlink(self._limbo_name(trans_id))
1343
def _set_mtime(self, path):
1344
"""All files that are created get the same mtime.
1346
This time is set by the first object to be created.
1348
if self._creation_mtime is None:
1349
self._creation_mtime = time.time()
1350
os.utime(path, (self._creation_mtime, self._creation_mtime))
1352
def create_hardlink(self, path, trans_id):
1353
"""Schedule creation of a hard link"""
1354
name = self._limbo_name(trans_id)
1358
if e.errno != errno.EPERM:
1360
raise errors.HardLinkNotSupported(path)
1362
unique_add(self._new_contents, trans_id, 'file')
1364
# Clean up the file, it never got registered so
1365
# TreeTransform.finalize() won't clean it up.
1369
def create_directory(self, trans_id):
    """Schedule creation of a new directory.

    See also new_directory.
    """
    limbo_path = self._limbo_name(trans_id)
    os.mkdir(limbo_path)
    unique_add(self._new_contents, trans_id, 'directory')
1377
def create_symlink(self, target, trans_id):
    """Schedule creation of a new symbolic link.

    target is a bytestring.
    See also new_symlink.
    """
    if has_symlinks():
        os.symlink(target, self._limbo_name(trans_id))
        unique_add(self._new_contents, trans_id, 'symlink')
    else:
        try:
            path = FinalPaths(self).get_path(trans_id)
        except KeyError:
            path = None
        raise UnableCreateSymlink(path=path)
1393
def cancel_creation(self, trans_id):
    """Cancel the creation of new file contents."""
    del self._new_contents[trans_id]
    if trans_id in self._observed_sha1s:
        del self._observed_sha1s[trans_id]
    children = self._limbo_children.get(trans_id)
    # if this is a limbo directory with children, move them before removing
    # the directory
    if children is not None:
        self._rename_in_limbo(children)
        del self._limbo_children[trans_id]
        del self._limbo_children_names[trans_id]
    delete_any(self._limbo_name(trans_id))
1407
def new_orphan(self, trans_id, parent_id):
    """See TreeTransformBase.new_orphan.

    Look up the configured orphaning policy and delegate to its handler,
    falling back to the registry default for unknown or unset policies.
    """
    # FIXME: There is no tree config, so we use the branch one (it's weird
    # to define it this way as orphaning can only occur in a working tree,
    # but that's all we have (for now). It will find the option in
    # locations.conf or bazaar.conf though) -- vila 20100916
    conf = self._tree.branch.get_config()
    conf_var_name = 'bzr.transform.orphan_policy'
    orphan_policy = conf.get_user_option(conf_var_name)
    default_policy = orphaning_registry.default_key
    if orphan_policy is None:
        orphan_policy = default_policy
    if orphan_policy not in orphaning_registry:
        trace.warning('%s (from %s) is not a known policy, defaulting '
                      'to %s' % (orphan_policy, conf_var_name, default_policy))
        orphan_policy = default_policy
    handle_orphan = orphaning_registry.get(orphan_policy)
    handle_orphan(self, trans_id, parent_id)
1426
class OrphaningError(errors.BzrError):
    """Base class for errors raised while orphaning an item."""

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent
1438
class OrphaningForbidden(OrphaningError):
    """Raised when the active orphaning policy forbids creating orphans."""

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
1447
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.
    :param orphan_id: The trans id that should be orphaned.
    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))
1473
def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')
1481
# Registry of the available orphaning policies; 'conflict' is the default.
orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')
1491
class TreeTransform(DiskTreeTransform):
1492
"""Represent a tree transformation.
1494
This object is designed to support incremental generation of the transform,
1497
However, it gives optimum performance when parent directories are created
1498
before their contents. The transform is then able to put child files
1499
directly in their parent directory, avoiding later renames.
1501
It is easy to produce malformed transforms, but they are generally
1502
harmless. Attempting to apply a malformed transform will cause an
1503
exception to be raised before any modifications are made to the tree.
1505
Many kinds of malformed transforms can be corrected with the
1506
resolve_conflicts function. The remaining ones indicate programming error,
1507
such as trying to create a file with no path.
1509
Two sets of file creation methods are supplied. Convenience methods are:
1514
These are composed of the low-level methods:
1516
* create_file or create_directory or create_symlink
1520
Transform/Transaction ids
1521
-------------------------
1522
trans_ids are temporary ids assigned to all files involved in a transform.
1523
It's possible, even common, that not all files in the Tree have trans_ids.
1525
trans_ids are used because filenames and file_ids are not good enough
1526
identifiers; filenames change, and not all files have file_ids. File-ids
1527
are also associated with trans-ids, so that moving a file moves its
1530
trans_ids are only valid for the TreeTransform that generated them.
1534
Limbo is a temporary directory use to hold new versions of files.
1535
Files are added to limbo by create_file, create_directory, create_symlink,
1536
and their convenience variants (new_*). Files may be removed from limbo
1537
using cancel_creation. Files are renamed from limbo into their final
1538
location as part of TreeTransform.apply
1540
Limbo must be cleaned up, by either calling TreeTransform.apply or
1541
calling TreeTransform.finalize.
1543
Files are placed into limbo inside their parent directories, where
1544
possible. This reduces subsequent renames, and makes operations involving
1545
lots of files faster. This optimization is only possible if the parent
1546
directory is created *before* creating any of its children, so avoid
1547
creating children before parents, where possible.
1551
This temporary directory is used by _FileMover for storing files that are
1552
about to be deleted. In case of rollback, the files will be restored.
1553
FileMover does not delete files until it is sure that a rollback will not
1556
def __init__(self, tree, pb=None):
1557
"""Note: a tree_write lock is taken on the tree.
1559
Use TreeTransform.finalize() to release the lock (can be omitted if
1560
TreeTransform.apply() called).
1562
tree.lock_tree_write()
1565
limbodir = urlutils.local_path_from_url(
1566
tree._transport.abspath('limbo'))
1570
if e.errno == errno.EEXIST:
1571
raise ExistingLimbo(limbodir)
1572
deletiondir = urlutils.local_path_from_url(
1573
tree._transport.abspath('pending-deletion'))
1575
os.mkdir(deletiondir)
1577
if e.errno == errno.EEXIST:
1578
raise errors.ExistingPendingDeletion(deletiondir)
1583
# Cache of realpath results, to speed up canonical_path
1584
self._realpaths = {}
1585
# Cache of relpath results, to speed up canonical_path
1587
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1588
tree.case_sensitive)
1589
self._deletiondir = deletiondir
1591
def canonical_path(self, path):
    """Get the canonical tree-relative path.

    Symlinks in the dirname are resolved (via os.path.realpath) but the
    final path component is not followed.  Results are memoized in
    self._relpaths / self._realpaths to speed up repeated lookups.
    """
    # don't follow final symlinks
    abs = self._tree.abspath(path)
    if abs in self._relpaths:
        return self._relpaths[abs]
    dirname, basename = os.path.split(abs)
    if dirname not in self._realpaths:
        self._realpaths[dirname] = os.path.realpath(dirname)
    dirname = self._realpaths[dirname]
    abs = pathjoin(dirname, basename)
    if dirname in self._relpaths:
        relpath = pathjoin(self._relpaths[dirname], basename)
        relpath = relpath.rstrip('/\\')
    else:
        relpath = self._tree.relpath(abs)
    self._relpaths[abs] = relpath
    return relpath
1610
def tree_kind(self, trans_id):
    """Determine the file kind in the working tree.

    :returns: The file kind or None if the file does not exist
    """
    path = self._tree_id_paths.get(trans_id)
    if path is None:
        return None
    try:
        return file_kind(self._tree.abspath(path))
    except errors.NoSuchFile:
        return None
1623
def _set_mode(self, trans_id, mode_id, typefunc):
1624
"""Set the mode of new file contents.
1625
The mode_id is the existing file to get the mode from (often the same
1626
as trans_id). The operation is only performed if there's a mode match
1627
according to typefunc.
1632
old_path = self._tree_id_paths[mode_id]
1636
mode = os.stat(self._tree.abspath(old_path)).st_mode
1638
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1639
# Either old_path doesn't exist, or the parent of the
1640
# target is not a directory (but will be one eventually)
1641
# Either way, we know it doesn't exist *right now*
1642
# See also bug #248448
1647
os.chmod(self._limbo_name(trans_id), mode)
1649
def iter_tree_children(self, parent_id):
    """Iterate through the entry's tree children, if any.

    Yields the trans_id of each on-disk child of parent_id's path,
    skipping control files.  Yields nothing when the parent has no tree
    path or its path is not a listable directory.
    """
    try:
        path = self._tree_id_paths[parent_id]
    except KeyError:
        return
    try:
        children = os.listdir(self._tree.abspath(path))
    except OSError as e:
        if not (osutils._is_error_enotdir(e)
                or e.errno in (errno.ENOENT, errno.ESRCH)):
            raise
        return
    for child in children:
        childpath = joinpath(path, child)
        if self._tree.is_control_filename(childpath):
            continue
        yield self.trans_id_tree_path(childpath)
1669
def _generate_limbo_path(self, trans_id):
1670
"""Generate a limbo path using the final path if possible.
1672
This optimizes the performance of applying the tree transform by
1673
avoiding renames. These renames can be avoided only when the parent
1674
directory is already scheduled for creation.
1676
If the final path cannot be used, falls back to using the trans_id as
1679
parent = self._new_parent.get(trans_id)
1680
# if the parent directory is already in limbo (e.g. when building a
1681
# tree), choose a limbo name inside the parent, to reduce further
1683
use_direct_path = False
1684
if self._new_contents.get(parent) == 'directory':
1685
filename = self._new_name.get(trans_id)
1686
if filename is not None:
1687
if parent not in self._limbo_children:
1688
self._limbo_children[parent] = set()
1689
self._limbo_children_names[parent] = {}
1690
use_direct_path = True
1691
# the direct path can only be used if no other file has
1692
# already taken this pathname, i.e. if the name is unused, or
1693
# if it is already associated with this trans_id.
1694
elif self._case_sensitive_target:
1695
if (self._limbo_children_names[parent].get(filename)
1696
in (trans_id, None)):
1697
use_direct_path = True
1699
for l_filename, l_trans_id in\
1700
self._limbo_children_names[parent].iteritems():
1701
if l_trans_id == trans_id:
1703
if l_filename.lower() == filename.lower():
1706
use_direct_path = True
1708
if not use_direct_path:
1709
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1711
limbo_name = pathjoin(self._limbo_files[parent], filename)
1712
self._limbo_children[parent].add(trans_id)
1713
self._limbo_children_names[parent][filename] = trans_id
1717
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1718
"""Apply all changes to the inventory and filesystem.
1720
If filesystem or inventory conflicts are present, MalformedTransform
1723
If apply succeeds, finalize is not necessary.
1725
:param no_conflicts: if True, the caller guarantees there are no
1726
conflicts, so no check is made.
1727
:param precomputed_delta: An inventory delta to use instead of
1729
:param _mover: Supply an alternate FileMover, for testing
1731
if not no_conflicts:
1732
self._check_malformed()
1733
child_pb = ui.ui_factory.nested_progress_bar()
1735
if precomputed_delta is None:
1736
child_pb.update('Apply phase', 0, 2)
1737
inventory_delta = self._generate_inventory_delta()
1740
inventory_delta = precomputed_delta
1743
mover = _FileMover()
1747
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1748
self._apply_removals(mover)
1749
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1750
modified_paths = self._apply_insertions(mover)
1755
mover.apply_deletions()
1758
self._tree.apply_inventory_delta(inventory_delta)
1759
self._apply_observed_sha1s()
1762
return _TransformResults(modified_paths, self.rename_count)
1764
def _generate_inventory_delta(self):
1765
"""Generate an inventory delta for the current transform."""
1766
inventory_delta = []
1767
child_pb = ui.ui_factory.nested_progress_bar()
1768
new_paths = self._inventory_altered()
1769
total_entries = len(new_paths) + len(self._removed_id)
1771
for num, trans_id in enumerate(self._removed_id):
1773
child_pb.update('removing file', num, total_entries)
1774
if trans_id == self._new_root:
1775
file_id = self._tree.get_root_id()
1777
file_id = self.tree_file_id(trans_id)
1778
# File-id isn't really being deleted, just moved
1779
if file_id in self._r_new_id:
1781
path = self._tree_id_paths[trans_id]
1782
inventory_delta.append((path, None, file_id, None))
1783
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1785
entries = self._tree.iter_entries_by_dir(
1786
new_path_file_ids.values())
1787
old_paths = dict((e.file_id, p) for p, e in entries)
1789
for num, (path, trans_id) in enumerate(new_paths):
1791
child_pb.update('adding file',
1792
num + len(self._removed_id), total_entries)
1793
file_id = new_path_file_ids[trans_id]
1797
kind = self.final_kind(trans_id)
1799
kind = self._tree.stored_kind(file_id)
1800
parent_trans_id = self.final_parent(trans_id)
1801
parent_file_id = new_path_file_ids.get(parent_trans_id)
1802
if parent_file_id is None:
1803
parent_file_id = self.final_file_id(parent_trans_id)
1804
if trans_id in self._new_reference_revision:
1805
new_entry = inventory.TreeReference(
1807
self._new_name[trans_id],
1808
self.final_file_id(self._new_parent[trans_id]),
1809
None, self._new_reference_revision[trans_id])
1811
new_entry = inventory.make_entry(kind,
1812
self.final_name(trans_id),
1813
parent_file_id, file_id)
1814
old_path = old_paths.get(new_entry.file_id)
1815
new_executability = self._new_executability.get(trans_id)
1816
if new_executability is not None:
1817
new_entry.executable = new_executability
1818
inventory_delta.append(
1819
(old_path, path, new_entry.file_id, new_entry))
1822
return inventory_delta
1824
def _apply_removals(self, mover):
1825
"""Perform tree operations that remove directory/inventory names.
1827
That is, delete files that are to be deleted, and put any files that
1828
need renaming into limbo. This must be done in strict child-to-parent
1831
If inventory_delta is None, no inventory delta generation is performed.
1833
tree_paths = list(self._tree_path_ids.iteritems())
1834
tree_paths.sort(reverse=True)
1835
child_pb = ui.ui_factory.nested_progress_bar()
1837
for num, (path, trans_id) in enumerate(tree_paths):
1838
# do not attempt to move root into a subdirectory of itself.
1841
child_pb.update('removing file', num, len(tree_paths))
1842
full_path = self._tree.abspath(path)
1843
if trans_id in self._removed_contents:
1844
delete_path = os.path.join(self._deletiondir, trans_id)
1845
mover.pre_delete(full_path, delete_path)
1846
elif (trans_id in self._new_name
1847
or trans_id in self._new_parent):
1849
mover.rename(full_path, self._limbo_name(trans_id))
1850
except errors.TransformRenameFailed, e:
1851
if e.errno != errno.ENOENT:
1854
self.rename_count += 1
1858
def _apply_insertions(self, mover):
1859
"""Perform tree operations that insert directory/inventory names.
1861
That is, create any files that need to be created, and restore from
1862
limbo any files that needed renaming. This must be done in strict
1863
parent-to-child order.
1865
If inventory_delta is None, no inventory delta is calculated, and
1866
no list of modified paths is returned.
1868
new_paths = self.new_paths(filesystem_only=True)
1870
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1872
child_pb = ui.ui_factory.nested_progress_bar()
1874
for num, (path, trans_id) in enumerate(new_paths):
1876
child_pb.update('adding file', num, len(new_paths))
1877
full_path = self._tree.abspath(path)
1878
if trans_id in self._needs_rename:
1880
mover.rename(self._limbo_name(trans_id), full_path)
1881
except errors.TransformRenameFailed, e:
1882
# We may be renaming a dangling inventory id
1883
if e.errno != errno.ENOENT:
1886
self.rename_count += 1
1887
# TODO: if trans_id in self._observed_sha1s, we should
1888
# re-stat the final target, since ctime will be
1889
# updated by the change.
1890
if (trans_id in self._new_contents or
1891
self.path_changed(trans_id)):
1892
if trans_id in self._new_contents:
1893
modified_paths.append(full_path)
1894
if trans_id in self._new_executability:
1895
self._set_executability(path, trans_id)
1896
if trans_id in self._observed_sha1s:
1897
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1898
st = osutils.lstat(full_path)
1899
self._observed_sha1s[trans_id] = (o_sha1, st)
1902
for path, trans_id in new_paths:
1903
# new_paths includes stuff like workingtree conflicts. Only the
1904
# stuff in new_contents actually comes from limbo.
1905
if trans_id in self._limbo_files:
1906
del self._limbo_files[trans_id]
1907
self._new_contents.clear()
1908
return modified_paths
1910
def _apply_observed_sha1s(self):
1911
"""After we have finished renaming everything, update observed sha1s
1913
This has to be done after self._tree.apply_inventory_delta, otherwise
1914
it doesn't know anything about the files we are updating. Also, we want
1915
to do this as late as possible, so that most entries end up cached.
1917
# TODO: this doesn't update the stat information for directories. So
1918
# the first 'bzr status' will still need to rewrite
1919
# .bzr/checkout/dirstate. However, we at least don't need to
1920
# re-read all of the files.
1921
# TODO: If the operation took a while, we could do a time.sleep(3) here
1922
# to allow the clock to tick over and ensure we won't have any
1923
# problems. (we could observe start time, and finish time, and if
1924
# it is less than eg 10% overhead, add a sleep call.)
1925
paths = FinalPaths(self)
1926
for trans_id, observed in self._observed_sha1s.iteritems():
1927
path = paths.get_path(trans_id)
1928
# We could get the file_id, but dirstate prefers to use the path
1929
# anyway, and it is 'cheaper' to determine.
1930
# file_id = self._new_id[trans_id]
1931
self._tree._observed_sha1(None, path, observed)
1934
class TransformPreview(DiskTreeTransform):
1935
"""A TreeTransform for generating preview trees.
1937
Unlike TreeTransform, this version works when the input tree is a
1938
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1939
unversioned files in the input tree.
1942
def __init__(self, tree, pb=None, case_sensitive=True):
1944
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1945
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1947
def canonical_path(self, path):
1950
def tree_kind(self, trans_id):
1951
path = self._tree_id_paths.get(trans_id)
1954
kind = self._tree.path_content_summary(path)[0]
1955
if kind == 'missing':
1959
def _set_mode(self, trans_id, mode_id, typefunc):
1960
"""Set the mode of new file contents.
1961
The mode_id is the existing file to get the mode from (often the same
1962
as trans_id). The operation is only performed if there's a mode match
1963
according to typefunc.
1965
# is it ok to ignore this? probably
1968
def iter_tree_children(self, parent_id):
1969
"""Iterate through the entry's tree children, if any"""
1971
path = self._tree_id_paths[parent_id]
1974
file_id = self.tree_file_id(parent_id)
1977
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1978
children = getattr(entry, 'children', {})
1979
for child in children:
1980
childpath = joinpath(path, child)
1981
yield self.trans_id_tree_path(childpath)
1983
def new_orphan(self, trans_id, parent_id):
1984
raise NotImplementedError(self.new_orphan)
1987
class _PreviewTree(tree.InventoryTree):
1988
"""Partial implementation of Tree to support show_diff_trees"""
1990
def __init__(self, transform):
1991
self._transform = transform
1992
self._final_paths = FinalPaths(transform)
1993
self.__by_parent = None
1994
self._parent_ids = []
1995
self._all_children_cache = {}
1996
self._path2trans_id_cache = {}
1997
self._final_name_cache = {}
1998
self._iter_changes_cache = dict((c[0], c) for c in
1999
self._transform.iter_changes())
2001
def _content_change(self, file_id):
2002
"""Return True if the content of this file changed"""
2003
changes = self._iter_changes_cache.get(file_id)
2004
# changes[2] is true if the file content changed. See
2005
# InterTree.iter_changes.
2006
return (changes is not None and changes[2])
2008
def _get_repository(self):
2009
repo = getattr(self._transform._tree, '_repository', None)
2011
repo = self._transform._tree.branch.repository
2014
def _iter_parent_trees(self):
2015
for revision_id in self.get_parent_ids():
2017
yield self.revision_tree(revision_id)
2018
except errors.NoSuchRevisionInTree:
2019
yield self._get_repository().revision_tree(revision_id)
2021
def _get_file_revision(self, file_id, vf, tree_revision):
2022
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
2023
self._iter_parent_trees()]
2024
vf.add_lines((file_id, tree_revision), parent_keys,
2025
self.get_file_lines(file_id))
2026
repo = self._get_repository()
2027
base_vf = repo.texts
2028
if base_vf not in vf.fallback_versionedfiles:
2029
vf.fallback_versionedfiles.append(base_vf)
2030
return tree_revision
2032
def _stat_limbo_file(self, file_id=None, trans_id=None):
2033
if trans_id is None:
2034
trans_id = self._transform.trans_id_file_id(file_id)
2035
name = self._transform._limbo_name(trans_id)
2036
return os.lstat(name)
2039
def _by_parent(self):
2040
if self.__by_parent is None:
2041
self.__by_parent = self._transform.by_parent()
2042
return self.__by_parent
2044
def _comparison_data(self, entry, path):
2045
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2046
if kind == 'missing':
2050
file_id = self._transform.final_file_id(self._path2trans_id(path))
2051
executable = self.is_executable(file_id, path)
2052
return kind, executable, None
2054
def is_locked(self):
2057
def lock_read(self):
2058
# Perhaps in theory, this should lock the TreeTransform?
2065
def inventory(self):
2066
"""This Tree does not use inventory as its backing data."""
2067
raise NotImplementedError(_PreviewTree.inventory)
2069
def get_root_id(self):
2070
return self._transform.final_file_id(self._transform.root)
2072
def all_file_ids(self):
2073
tree_ids = set(self._transform._tree.all_file_ids())
2074
tree_ids.difference_update(self._transform.tree_file_id(t)
2075
for t in self._transform._removed_id)
2076
tree_ids.update(self._transform._new_id.values())
2080
return iter(self.all_file_ids())
2082
def _has_id(self, file_id, fallback_check):
2083
if file_id in self._transform._r_new_id:
2085
elif file_id in set([self._transform.tree_file_id(trans_id) for
2086
trans_id in self._transform._removed_id]):
2089
return fallback_check(file_id)
2091
def has_id(self, file_id):
2092
return self._has_id(file_id, self._transform._tree.has_id)
2094
def has_or_had_id(self, file_id):
2095
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2097
def _path2trans_id(self, path):
2098
# We must not use None here, because that is a valid value to store.
2099
trans_id = self._path2trans_id_cache.get(path, object)
2100
if trans_id is not object:
2102
segments = splitpath(path)
2103
cur_parent = self._transform.root
2104
for cur_segment in segments:
2105
for child in self._all_children(cur_parent):
2106
final_name = self._final_name_cache.get(child)
2107
if final_name is None:
2108
final_name = self._transform.final_name(child)
2109
self._final_name_cache[child] = final_name
2110
if final_name == cur_segment:
2114
self._path2trans_id_cache[path] = None
2116
self._path2trans_id_cache[path] = cur_parent
2119
def path2id(self, path):
2120
return self._transform.final_file_id(self._path2trans_id(path))
2122
def id2path(self, file_id):
2123
trans_id = self._transform.trans_id_file_id(file_id)
2125
return self._final_paths._determine_path(trans_id)
2127
raise errors.NoSuchId(self, file_id)
2129
def _all_children(self, trans_id):
2130
children = self._all_children_cache.get(trans_id)
2131
if children is not None:
2133
children = set(self._transform.iter_tree_children(trans_id))
2134
# children in the _new_parent set are provided by _by_parent.
2135
children.difference_update(self._transform._new_parent.keys())
2136
children.update(self._by_parent.get(trans_id, []))
2137
self._all_children_cache[trans_id] = children
2140
def iter_children(self, file_id):
2141
trans_id = self._transform.trans_id_file_id(file_id)
2142
for child_trans_id in self._all_children(trans_id):
2143
yield self._transform.final_file_id(child_trans_id)
2146
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2147
in self._transform._tree.extras())
2148
possible_extras.update(self._transform._new_contents)
2149
possible_extras.update(self._transform._removed_id)
2150
for trans_id in possible_extras:
2151
if self._transform.final_file_id(trans_id) is None:
2152
yield self._final_paths._determine_path(trans_id)
2154
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2155
yield_parents=False):
2156
for trans_id, parent_file_id in ordered_entries:
2157
file_id = self._transform.final_file_id(trans_id)
2160
if (specific_file_ids is not None
2161
and file_id not in specific_file_ids):
2163
kind = self._transform.final_kind(trans_id)
2165
kind = self._transform._tree.stored_kind(file_id)
2166
new_entry = inventory.make_entry(
2168
self._transform.final_name(trans_id),
2169
parent_file_id, file_id)
2170
yield new_entry, trans_id
2172
def _list_files_by_dir(self):
2173
todo = [ROOT_PARENT]
2175
while len(todo) > 0:
2177
parent_file_id = self._transform.final_file_id(parent)
2178
children = list(self._all_children(parent))
2179
paths = dict(zip(children, self._final_paths.get_paths(children)))
2180
children.sort(key=paths.get)
2181
todo.extend(reversed(children))
2182
for trans_id in children:
2183
ordered_ids.append((trans_id, parent_file_id))
2186
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2187
# This may not be a maximally efficient implementation, but it is
2188
# reasonably straightforward. An implementation that grafts the
2189
# TreeTransform changes onto the tree's iter_entries_by_dir results
2190
# might be more efficient, but requires tricky inferences about stack
2192
ordered_ids = self._list_files_by_dir()
2193
for entry, trans_id in self._make_inv_entries(ordered_ids,
2194
specific_file_ids, yield_parents=yield_parents):
2195
yield unicode(self._final_paths.get_path(trans_id)), entry
2197
def _iter_entries_for_dir(self, dir_path):
2198
"""Return path, entry for items in a directory without recursing down."""
2199
dir_file_id = self.path2id(dir_path)
2201
for file_id in self.iter_children(dir_file_id):
2202
trans_id = self._transform.trans_id_file_id(file_id)
2203
ordered_ids.append((trans_id, file_id))
2204
for entry, trans_id in self._make_inv_entries(ordered_ids):
2205
yield unicode(self._final_paths.get_path(trans_id)), entry
2207
def list_files(self, include_root=False, from_dir=None, recursive=True):
2208
"""See WorkingTree.list_files."""
2209
# XXX This should behave like WorkingTree.list_files, but is really
2210
# more like RevisionTree.list_files.
2214
prefix = from_dir + '/'
2215
entries = self.iter_entries_by_dir()
2216
for path, entry in entries:
2217
if entry.name == '' and not include_root:
2220
if not path.startswith(prefix):
2222
path = path[len(prefix):]
2223
yield path, 'V', entry.kind, entry.file_id, entry
2225
if from_dir is None and include_root is True:
2226
root_entry = inventory.make_entry('directory', '',
2227
ROOT_PARENT, self.get_root_id())
2228
yield '', 'V', 'directory', root_entry.file_id, root_entry
2229
entries = self._iter_entries_for_dir(from_dir or '')
2230
for path, entry in entries:
2231
yield path, 'V', entry.kind, entry.file_id, entry
2233
def kind(self, file_id):
2234
trans_id = self._transform.trans_id_file_id(file_id)
2235
return self._transform.final_kind(trans_id)
2237
def stored_kind(self, file_id):
2238
trans_id = self._transform.trans_id_file_id(file_id)
2240
return self._transform._new_contents[trans_id]
2242
return self._transform._tree.stored_kind(file_id)
2244
def get_file_mtime(self, file_id, path=None):
2245
"""See Tree.get_file_mtime"""
2246
if not self._content_change(file_id):
2247
return self._transform._tree.get_file_mtime(file_id)
2248
return self._stat_limbo_file(file_id).st_mtime
2250
def _file_size(self, entry, stat_value):
2251
return self.get_file_size(entry.file_id)
2253
def get_file_size(self, file_id):
2254
"""See Tree.get_file_size"""
2255
trans_id = self._transform.trans_id_file_id(file_id)
2256
kind = self._transform.final_kind(trans_id)
2259
if trans_id in self._transform._new_contents:
2260
return self._stat_limbo_file(trans_id=trans_id).st_size
2261
if self.kind(file_id) == 'file':
2262
return self._transform._tree.get_file_size(file_id)
2266
def get_file_sha1(self, file_id, path=None, stat_value=None):
2267
trans_id = self._transform.trans_id_file_id(file_id)
2268
kind = self._transform._new_contents.get(trans_id)
2270
return self._transform._tree.get_file_sha1(file_id)
2272
fileobj = self.get_file(file_id)
2274
return sha_file(fileobj)
2278
def is_executable(self, file_id, path=None):
2281
trans_id = self._transform.trans_id_file_id(file_id)
2283
return self._transform._new_executability[trans_id]
2286
return self._transform._tree.is_executable(file_id, path)
2288
if e.errno == errno.ENOENT:
2291
except errors.NoSuchId:
2294
def has_filename(self, path):
2295
trans_id = self._path2trans_id(path)
2296
if trans_id in self._transform._new_contents:
2298
elif trans_id in self._transform._removed_contents:
2301
return self._transform._tree.has_filename(path)
2303
def path_content_summary(self, path):
2304
trans_id = self._path2trans_id(path)
2305
tt = self._transform
2306
tree_path = tt._tree_id_paths.get(trans_id)
2307
kind = tt._new_contents.get(trans_id)
2309
if tree_path is None or trans_id in tt._removed_contents:
2310
return 'missing', None, None, None
2311
summary = tt._tree.path_content_summary(tree_path)
2312
kind, size, executable, link_or_sha1 = summary
2315
limbo_name = tt._limbo_name(trans_id)
2316
if trans_id in tt._new_reference_revision:
2317
kind = 'tree-reference'
2319
statval = os.lstat(limbo_name)
2320
size = statval.st_size
2321
if not supports_executable():
2324
executable = statval.st_mode & S_IEXEC
2328
if kind == 'symlink':
2329
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2330
executable = tt._new_executability.get(trans_id, executable)
2331
return kind, size, executable, link_or_sha1
2333
def iter_changes(self, from_tree, include_unchanged=False,
2334
specific_files=None, pb=None, extra_trees=None,
2335
require_versioned=True, want_unversioned=False):
2336
"""See InterTree.iter_changes.
2338
This has a fast path that is only used when the from_tree matches
2339
the transform tree, and no fancy options are supplied.
2341
if (from_tree is not self._transform._tree or include_unchanged or
2342
specific_files or want_unversioned):
2343
return tree.InterTree(from_tree, self).iter_changes(
2344
include_unchanged=include_unchanged,
2345
specific_files=specific_files,
2347
extra_trees=extra_trees,
2348
require_versioned=require_versioned,
2349
want_unversioned=want_unversioned)
2350
if want_unversioned:
2351
raise ValueError('want_unversioned is not supported')
2352
return self._transform.iter_changes()
2354
def get_file(self, file_id, path=None):
2355
"""See Tree.get_file"""
2356
if not self._content_change(file_id):
2357
return self._transform._tree.get_file(file_id, path)
2358
trans_id = self._transform.trans_id_file_id(file_id)
2359
name = self._transform._limbo_name(trans_id)
2360
return open(name, 'rb')
2362
def get_file_with_stat(self, file_id, path=None):
2363
return self.get_file(file_id, path), None
2365
def annotate_iter(self, file_id,
2366
default_revision=_mod_revision.CURRENT_REVISION):
2367
changes = self._iter_changes_cache.get(file_id)
2371
changed_content, versioned, kind = (changes[2], changes[3],
2375
get_old = (kind[0] == 'file' and versioned[0])
2377
old_annotation = self._transform._tree.annotate_iter(file_id,
2378
default_revision=default_revision)
2382
return old_annotation
2383
if not changed_content:
2384
return old_annotation
2385
# TODO: This is doing something similar to what WT.annotate_iter is
2386
# doing, however it fails slightly because it doesn't know what
2387
# the *other* revision_id is, so it doesn't know how to give the
2388
# other as the origin for some lines, they all get
2389
# 'default_revision'
2390
# It would be nice to be able to use the new Annotator based
2391
# approach, as well.
2392
return annotate.reannotate([old_annotation],
2393
self.get_file(file_id).readlines(),
2396
def get_symlink_target(self, file_id, path=None):
2397
"""See Tree.get_symlink_target"""
2398
if not self._content_change(file_id):
2399
return self._transform._tree.get_symlink_target(file_id)
2400
trans_id = self._transform.trans_id_file_id(file_id)
2401
name = self._transform._limbo_name(trans_id)
2402
return osutils.readlink(name)
2404
def walkdirs(self, prefix=''):
2405
pending = [self._transform.root]
2406
while len(pending) > 0:
2407
parent_id = pending.pop()
2410
prefix = prefix.rstrip('/')
2411
parent_path = self._final_paths.get_path(parent_id)
2412
parent_file_id = self._transform.final_file_id(parent_id)
2413
for child_id in self._all_children(parent_id):
2414
path_from_root = self._final_paths.get_path(child_id)
2415
basename = self._transform.final_name(child_id)
2416
file_id = self._transform.final_file_id(child_id)
2417
kind = self._transform.final_kind(child_id)
2418
if kind is not None:
2419
versioned_kind = kind
2422
versioned_kind = self._transform._tree.stored_kind(file_id)
2423
if versioned_kind == 'directory':
2424
subdirs.append(child_id)
2425
children.append((path_from_root, basename, kind, None,
2426
file_id, versioned_kind))
2428
if parent_path.startswith(prefix):
2429
yield (parent_path, parent_file_id), children
2430
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2433
def get_parent_ids(self):
2434
return self._parent_ids
2436
def set_parent_ids(self, parent_ids):
2437
self._parent_ids = parent_ids
2439
def get_revision_tree(self, revision_id):
2440
return self._transform._tree.get_revision_tree(revision_id)
864
2443
def joinpath(parent, child):
865
2444
"""Join tree-relative paths, handling the tree root specially"""
866
2445
if parent is None or parent == "":
896
2475
self._known_paths[trans_id] = self._determine_path(trans_id)
897
2476
return self._known_paths[trans_id]
2478
def get_paths(self, trans_ids):
2479
return [(self.get_path(t), t) for t in trans_ids]
899
2483
def topology_sorted_ids(tree):
900
2484
"""Determine the topological order of the ids in a tree"""
901
2485
file_ids = list(tree)
902
2486
file_ids.sort(key=tree.id2path)
905
def build_tree(tree, wt):
906
"""Create working tree for a branch, using a Transaction."""
2490
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2491
delta_from_tree=False):
2492
"""Create working tree for a branch, using a TreeTransform.
2494
This function should be used on empty trees, having a tree root at most.
2495
(see merge and revert functionality for working with existing trees)
2497
Existing files are handled like so:
2499
- Existing bzrdirs take precedence over creating new items. They are
2500
created as '%s.diverted' % name.
2501
- Otherwise, if the content on disk matches the content we are building,
2502
it is silently replaced.
2503
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2505
:param tree: The tree to convert wt into a copy of
2506
:param wt: The working tree that files will be placed into
2507
:param accelerator_tree: A tree which can be used for retrieving file
2508
contents more quickly than tree itself, i.e. a workingtree. tree
2509
will be used for cases where accelerator_tree's content is different.
2510
:param hardlink: If true, hard-link files to accelerator_tree, where
2511
possible. accelerator_tree must implement abspath, i.e. be a
2513
:param delta_from_tree: If true, build_tree may use the input Tree to
2514
generate the inventory delta.
2516
wt.lock_tree_write()
2520
if accelerator_tree is not None:
2521
accelerator_tree.lock_read()
2523
return _build_tree(tree, wt, accelerator_tree, hardlink,
2526
if accelerator_tree is not None:
2527
accelerator_tree.unlock()
2534
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2535
"""See build_tree."""
2536
for num, _unused in enumerate(wt.all_file_ids()):
2537
if num > 0: # more than just a root
2538
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
907
2539
file_trans_id = {}
908
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2540
top_pb = ui.ui_factory.nested_progress_bar()
909
2541
pp = ProgressPhase("Build phase", 2, top_pb)
2542
if tree.inventory.root is not None:
2543
# This is kind of a hack: we should be altering the root
2544
# as part of the regular tree shape diff logic.
2545
# The conditional test here is to avoid doing an
2546
# expensive operation (flush) every time the root id
2547
# is set within the tree, nor setting the root and thus
2548
# marking the tree as dirty, because we use two different
2549
# idioms here: tree interfaces and inventory interfaces.
2550
if wt.get_root_id() != tree.get_root_id():
2551
wt.set_root_id(tree.get_root_id())
910
2553
tt = TreeTransform(wt)
913
file_trans_id[wt.get_root_id()] = tt.trans_id_tree_file_id(wt.get_root_id())
914
file_ids = topology_sorted_ids(tree)
915
pb = bzrlib.ui.ui_factory.nested_progress_bar()
2557
file_trans_id[wt.get_root_id()] = \
2558
tt.trans_id_tree_file_id(wt.get_root_id())
2559
pb = ui.ui_factory.nested_progress_bar()
917
for num, file_id in enumerate(file_ids):
918
pb.update("Building tree", num, len(file_ids))
919
entry = tree.inventory[file_id]
2561
deferred_contents = []
2563
total = len(tree.inventory)
2565
precomputed_delta = []
2567
precomputed_delta = None
2568
# Check if tree inventory has content. If so, we populate
2569
# existing_files with the directory content. If there are no
2570
# entries we skip populating existing_files as its not used.
2571
# This improves performance and unncessary work on large
2572
# directory trees. (#501307)
2574
existing_files = set()
2575
for dir, files in wt.walkdirs():
2576
existing_files.update(f[0] for f in files)
2577
for num, (tree_path, entry) in \
2578
enumerate(tree.inventory.iter_entries_by_dir()):
2579
pb.update("Building tree", num - len(deferred_contents), total)
920
2580
if entry.parent_id is None:
922
if entry.parent_id not in file_trans_id:
923
raise repr(entry.parent_id)
2583
file_id = entry.file_id
2585
precomputed_delta.append((None, tree_path, file_id, entry))
2586
if tree_path in existing_files:
2587
target_path = wt.abspath(tree_path)
2588
kind = file_kind(target_path)
2589
if kind == "directory":
2591
bzrdir.BzrDir.open(target_path)
2592
except errors.NotBranchError:
2596
if (file_id not in divert and
2597
_content_match(tree, entry, file_id, kind,
2599
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2600
if kind == 'directory':
924
2602
parent_id = file_trans_id[entry.parent_id]
925
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2603
if entry.kind == 'file':
2604
# We *almost* replicate new_by_entry, so that we can defer
2605
# getting the file text, and get them all at once.
2606
trans_id = tt.create_path(entry.name, parent_id)
2607
file_trans_id[file_id] = trans_id
2608
tt.version_file(file_id, trans_id)
2609
executable = tree.is_executable(file_id, tree_path)
2611
tt.set_executability(executable, trans_id)
2612
trans_data = (trans_id, tree_path, entry.text_sha1)
2613
deferred_contents.append((file_id, trans_data))
2615
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2618
new_trans_id = file_trans_id[file_id]
2619
old_parent = tt.trans_id_tree_path(tree_path)
2620
_reparent_children(tt, old_parent, new_trans_id)
2621
offset = num + 1 - len(deferred_contents)
2622
_create_files(tt, tree, deferred_contents, pb, offset,
2623
accelerator_tree, hardlink)
2627
divert_trans = set(file_trans_id[f] for f in divert)
2628
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2629
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2630
if len(raw_conflicts) > 0:
2631
precomputed_delta = None
2632
conflicts = cook_conflicts(raw_conflicts, tt)
2633
for conflict in conflicts:
2634
trace.warning(unicode(conflict))
2636
wt.add_conflicts(conflicts)
2637
except errors.UnsupportedOperation:
2639
result = tt.apply(no_conflicts=True,
2640
precomputed_delta=precomputed_delta)
933
2643
top_pb.finished()
2647
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2649
total = len(desired_files) + offset
2651
if accelerator_tree is None:
2652
new_desired_files = desired_files
2654
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2655
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2656
in iter if not (c or e[0] != e[1])]
2657
if accelerator_tree.supports_content_filtering():
2658
unchanged = [(f, p) for (f, p) in unchanged
2659
if not accelerator_tree.iter_search_rules([p]).next()]
2660
unchanged = dict(unchanged)
2661
new_desired_files = []
2663
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2664
accelerator_path = unchanged.get(file_id)
2665
if accelerator_path is None:
2666
new_desired_files.append((file_id,
2667
(trans_id, tree_path, text_sha1)))
2669
pb.update('Adding file contents', count + offset, total)
2671
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2674
contents = accelerator_tree.get_file(file_id, accelerator_path)
2675
if wt.supports_content_filtering():
2676
filters = wt._content_filter_stack(tree_path)
2677
contents = filtered_output_bytes(contents, filters,
2678
ContentFilterContext(tree_path, tree))
2680
tt.create_file(contents, trans_id, sha1=text_sha1)
2684
except AttributeError:
2685
# after filtering, contents may no longer be file-like
2689
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2690
tree.iter_files_bytes(new_desired_files)):
2691
if wt.supports_content_filtering():
2692
filters = wt._content_filter_stack(tree_path)
2693
contents = filtered_output_bytes(contents, filters,
2694
ContentFilterContext(tree_path, tree))
2695
tt.create_file(contents, trans_id, sha1=text_sha1)
2696
pb.update('Adding file contents', count + offset, total)
2699
def _reparent_children(tt, old_parent, new_parent):
2700
for child in tt.iter_tree_children(old_parent):
2701
tt.adjust_path(tt.final_name(child), new_parent, child)
2704
def _reparent_transform_children(tt, old_parent, new_parent):
2705
by_parent = tt.by_parent()
2706
for child in by_parent[old_parent]:
2707
tt.adjust_path(tt.final_name(child), new_parent, child)
2708
return by_parent[old_parent]
2711
def _content_match(tree, entry, file_id, kind, target_path):
2712
if entry.kind != kind:
2714
if entry.kind == "directory":
2716
if entry.kind == "file":
2717
f = file(target_path, 'rb')
2719
if tree.get_file_text(file_id) == f.read():
2723
elif entry.kind == "symlink":
2724
if tree.get_symlink_target(file_id) == os.readlink(target_path):
2729
def resolve_checkout(tt, conflicts, divert):
2730
new_conflicts = set()
2731
for c_type, conflict in ((c[0], c) for c in conflicts):
2732
# Anything but a 'duplicate' would indicate programmer error
2733
if c_type != 'duplicate':
2734
raise AssertionError(c_type)
2735
# Now figure out which is new and which is old
2736
if tt.new_contents(conflict[1]):
2737
new_file = conflict[1]
2738
old_file = conflict[2]
2740
new_file = conflict[2]
2741
old_file = conflict[1]
2743
# We should only get here if the conflict wasn't completely
2745
final_parent = tt.final_parent(old_file)
2746
if new_file in divert:
2747
new_name = tt.final_name(old_file)+'.diverted'
2748
tt.adjust_path(new_name, final_parent, new_file)
2749
new_conflicts.add((c_type, 'Diverted to',
2750
new_file, old_file))
2752
new_name = tt.final_name(old_file)+'.moved'
2753
tt.adjust_path(new_name, final_parent, old_file)
2754
new_conflicts.add((c_type, 'Moved existing file to',
2755
old_file, new_file))
2756
return new_conflicts
935
2759
def new_by_entry(tt, entry, parent_id, tree):
936
2760
"""Create a new file according to its inventory entry"""
1046
2844
working_kind = working_tree.kind(file_id)
1047
2845
has_contents = True
1049
if e.errno != errno.ENOENT:
1051
2847
has_contents = False
1052
2848
contents_mod = True
1053
2849
meta_mod = False
1054
2850
if has_contents is True:
1055
real_e_kind = entry.kind
1056
if real_e_kind == 'root_directory':
1057
real_e_kind = 'directory'
1058
if real_e_kind != working_kind:
2851
if entry.kind != working_kind:
1059
2852
contents_mod, meta_mod = True, False
1061
cur_entry._read_tree_state(working_tree.id2path(file_id),
2854
cur_entry._read_tree_state(working_tree.id2path(file_id),
1063
2856
contents_mod, meta_mod = entry.detect_changes(cur_entry)
1064
2857
cur_entry._forget_tree_state()
1065
2858
return has_contents, contents_mod, meta_mod
1068
def revert(working_tree, target_tree, filenames, backups=False,
1069
pb=DummyProgress()):
2861
def revert(working_tree, target_tree, filenames, backups=False,
2862
pb=None, change_reporter=None):
1070
2863
"""Revert a working tree's contents to those of a target tree."""
1071
interesting_ids = find_interesting(working_tree, target_tree, filenames)
1072
def interesting(file_id):
1073
return interesting_ids is None or file_id in interesting_ids
2864
target_tree.lock_read()
2865
pb = ui.ui_factory.nested_progress_bar()
1075
2866
tt = TreeTransform(working_tree, pb)
1077
merge_modified = working_tree.merge_modified()
1079
def trans_id_file_id(file_id):
1081
return trans_id[file_id]
1083
return tt.trans_id_tree_file_id(file_id)
1085
pp = ProgressPhase("Revert phase", 4, pb)
1087
sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
1089
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1091
by_parent = tt.by_parent()
1092
for id_num, file_id in enumerate(sorted_interesting):
1093
child_pb.update("Reverting file", id_num+1,
1094
len(sorted_interesting))
1095
if file_id not in working_tree.inventory:
1096
entry = target_tree.inventory[file_id]
1097
parent_id = trans_id_file_id(entry.parent_id)
1098
e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
1099
trans_id[file_id] = e_trans_id
1101
backup_this = backups
1102
if file_id in merge_modified:
1104
del merge_modified[file_id]
1105
change_entry(tt, file_id, working_tree, target_tree,
1106
trans_id_file_id, backup_this, trans_id,
1111
wt_interesting = [i for i in working_tree.inventory if interesting(i)]
1112
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1114
for id_num, file_id in enumerate(wt_interesting):
1115
child_pb.update("New file check", id_num+1,
1116
len(sorted_interesting))
1117
if file_id not in target_tree:
1118
trans_id = tt.trans_id_tree_file_id(file_id)
1119
tt.unversion_file(trans_id)
1120
if file_id in merge_modified:
2868
pp = ProgressPhase("Revert phase", 3, pb)
2869
conflicts, merge_modified = _prepare_revert_transform(
2870
working_tree, target_tree, tt, filenames, backups, pp)
2872
change_reporter = delta._ChangeReporter(
2873
unversioned_filter=working_tree.is_ignored)
2874
delta.report_changes(tt.iter_changes(), change_reporter)
2875
for conflict in conflicts:
2876
trace.warning(unicode(conflict))
2879
working_tree.set_merge_modified(merge_modified)
2881
target_tree.unlock()
2887
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2888
backups, pp, basis_tree=None,
2889
merge_modified=None):
2890
child_pb = ui.ui_factory.nested_progress_bar()
2892
if merge_modified is None:
2893
merge_modified = working_tree.merge_modified()
2894
merge_modified = _alter_files(working_tree, target_tree, tt,
2895
child_pb, filenames, backups,
2896
merge_modified, basis_tree)
2899
child_pb = ui.ui_factory.nested_progress_bar()
2901
raw_conflicts = resolve_conflicts(tt, child_pb,
2902
lambda t, c: conflict_pass(t, c, target_tree))
2905
conflicts = cook_conflicts(raw_conflicts, tt)
2906
return conflicts, merge_modified
2909
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2910
backups, merge_modified, basis_tree=None):
2911
if basis_tree is not None:
2912
basis_tree.lock_read()
2913
# We ask the working_tree for its changes relative to the target, rather
2914
# than the target changes relative to the working tree. Because WT4 has an
2915
# optimizer to compare itself to a target, but no optimizer for the
2917
change_list = working_tree.iter_changes(target_tree,
2918
specific_files=specific_files, pb=pb)
2919
if target_tree.get_root_id() is None:
2925
for id_num, (file_id, path, changed_content, versioned, parent, name,
2926
kind, executable) in enumerate(change_list):
2927
target_path, wt_path = path
2928
target_versioned, wt_versioned = versioned
2929
target_parent, wt_parent = parent
2930
target_name, wt_name = name
2931
target_kind, wt_kind = kind
2932
target_executable, wt_executable = executable
2933
if skip_root and wt_parent is None:
2935
trans_id = tt.trans_id_file_id(file_id)
2938
keep_content = False
2939
if wt_kind == 'file' and (backups or target_kind is None):
2940
wt_sha1 = working_tree.get_file_sha1(file_id)
2941
if merge_modified.get(file_id) != wt_sha1:
2942
# acquire the basis tree lazily to prevent the
2943
# expense of accessing it when it's not needed ?
2944
# (Guessing, RBC, 200702)
2945
if basis_tree is None:
2946
basis_tree = working_tree.basis_tree()
2947
basis_tree.lock_read()
2948
if basis_tree.has_id(file_id):
2949
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2951
elif target_kind is None and not target_versioned:
2953
if wt_kind is not None:
2954
if not keep_content:
1121
2955
tt.delete_contents(trans_id)
1122
del merge_modified[file_id]
1126
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1128
raw_conflicts = resolve_conflicts(tt, child_pb)
1131
conflicts = cook_conflicts(raw_conflicts, tt)
1132
for conflict in conflicts:
1136
working_tree.set_merge_modified({})
2956
elif target_kind is not None:
2957
parent_trans_id = tt.trans_id_file_id(wt_parent)
2958
backup_name = tt._available_backup_name(
2959
wt_name, parent_trans_id)
2960
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2961
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2962
if wt_versioned and target_versioned:
2963
tt.unversion_file(trans_id)
2964
tt.version_file(file_id, new_trans_id)
2965
# New contents should have the same unix perms as old
2968
trans_id = new_trans_id
2969
if target_kind in ('directory', 'tree-reference'):
2970
tt.create_directory(trans_id)
2971
if target_kind == 'tree-reference':
2972
revision = target_tree.get_reference_revision(file_id,
2974
tt.set_tree_reference(revision, trans_id)
2975
elif target_kind == 'symlink':
2976
tt.create_symlink(target_tree.get_symlink_target(file_id),
2978
elif target_kind == 'file':
2979
deferred_files.append((file_id, (trans_id, mode_id)))
2980
if basis_tree is None:
2981
basis_tree = working_tree.basis_tree()
2982
basis_tree.lock_read()
2983
new_sha1 = target_tree.get_file_sha1(file_id)
2984
if (basis_tree.has_id(file_id) and
2985
new_sha1 == basis_tree.get_file_sha1(file_id)):
2986
if file_id in merge_modified:
2987
del merge_modified[file_id]
2989
merge_modified[file_id] = new_sha1
2991
# preserve the execute bit when backing up
2992
if keep_content and wt_executable == target_executable:
2993
tt.set_executability(target_executable, trans_id)
2994
elif target_kind is not None:
2995
raise AssertionError(target_kind)
2996
if not wt_versioned and target_versioned:
2997
tt.version_file(file_id, trans_id)
2998
if wt_versioned and not target_versioned:
2999
tt.unversion_file(trans_id)
3000
if (target_name is not None and
3001
(wt_name != target_name or wt_parent != target_parent)):
3002
if target_name == '' and target_parent is None:
3003
parent_trans = ROOT_PARENT
3005
parent_trans = tt.trans_id_file_id(target_parent)
3006
if wt_parent is None and wt_versioned:
3007
tt.adjust_root_path(target_name, parent_trans)
3009
tt.adjust_path(target_name, parent_trans, trans_id)
3010
if wt_executable != target_executable and target_kind == "file":
3011
tt.set_executability(target_executable, trans_id)
3012
if working_tree.supports_content_filtering():
3013
for index, ((trans_id, mode_id), bytes) in enumerate(
3014
target_tree.iter_files_bytes(deferred_files)):
3015
file_id = deferred_files[index][0]
3016
# We're reverting a tree to the target tree so using the
3017
# target tree to find the file path seems the best choice
3018
# here IMO - Ian C 27/Oct/2009
3019
filter_tree_path = target_tree.id2path(file_id)
3020
filters = working_tree._content_filter_stack(filter_tree_path)
3021
bytes = filtered_output_bytes(bytes, filters,
3022
ContentFilterContext(filter_tree_path, working_tree))
3023
tt.create_file(bytes, trans_id, mode_id)
3025
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
3027
tt.create_file(bytes, trans_id, mode_id)
3028
tt.fixup_new_roots()
1143
def resolve_conflicts(tt, pb=DummyProgress()):
3030
if basis_tree is not None:
3032
return merge_modified
3035
def resolve_conflicts(tt, pb=None, pass_func=None):
1144
3036
"""Make many conflict-resolution attempts, but die if they fail"""
3037
if pass_func is None:
3038
pass_func = conflict_pass
1145
3039
new_conflicts = set()
3040
pb = ui.ui_factory.nested_progress_bar()
1147
3042
for n in range(10):
1148
3043
pb.update('Resolution pass', n+1, 10)
1149
3044
conflicts = tt.find_conflicts()
1150
3045
if len(conflicts) == 0:
1151
3046
return new_conflicts
1152
new_conflicts.update(conflict_pass(tt, conflicts))
3047
new_conflicts.update(pass_func(tt, conflicts))
1153
3048
raise MalformedTransform(conflicts=conflicts)
1158
def conflict_pass(tt, conflicts):
1159
"""Resolve some classes of conflicts."""
3053
def conflict_pass(tt, conflicts, path_tree=None):
3054
"""Resolve some classes of conflicts.
3056
:param tt: The transform to resolve conflicts in
3057
:param conflicts: The conflicts to resolve
3058
:param path_tree: A Tree to get supplemental paths from
1160
3060
new_conflicts = set()
1161
3061
for c_type, conflict in ((c[0], c) for c in conflicts):
1162
3062
if c_type == 'duplicate id':