# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
from stat import S_ISREG, S_IEXEC
29
lazy_import.lazy_import(globals(), """
40
revision as _mod_revision,
45
from bzrlib.errors import (DuplicateKey, MalformedTransform, NoSuchFile,
46
ReusingTransform, CantMoveRoot,
47
ExistingLimbo, ImmortalLimbo, NoFinalPath,
49
from bzrlib.filters import filtered_output_bytes, ContentFilterContext
50
from bzrlib.osutils import (
59
from bzrlib.progress import ProgressPhase
60
from bzrlib.symbol_versioning import (
67
ROOT_PARENT = "root-parent"
69
def unique_add(map, key, value):
    if key in map:
        raise DuplicateKey(key=key)
    map[key] = value
76
class _TransformResults(object):
77
def __init__(self, modified_paths, rename_count):
79
self.modified_paths = modified_paths
80
self.rename_count = rename_count
83
class TreeTransformBase(object):
84
"""The base class for TreeTransform and its kin."""
86
    def __init__(self, tree, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
99
        # mapping of trans_id -> new basename
        self._new_name = {}
        # mapping of trans_id -> new parent trans_id
102
self._new_parent = {}
103
# mapping of trans_id with new contents -> new file_kind
104
self._new_contents = {}
105
# mapping of trans_id => (sha1 of content, stat_value)
106
self._observed_sha1s = {}
107
# Set of trans_ids whose contents will be removed
108
self._removed_contents = set()
109
# Mapping of trans_id -> new execute-bit value
110
self._new_executability = {}
111
# Mapping of trans_id -> new tree-reference value
112
self._new_reference_revision = {}
113
        # Mapping of trans_id -> new file_id
        self._new_id = {}
        # Mapping of old file-id -> trans_id
        self._non_present_ids = {}
        # Mapping of new file_id -> trans_id
        self._r_new_id = {}
        # Set of trans_ids that will be removed
120
self._removed_id = set()
121
# Mapping of path in old tree -> trans_id
122
self._tree_path_ids = {}
123
# Mapping trans_id -> path in old tree
124
self._tree_id_paths = {}
125
# The trans_id that will be used as the tree root
126
root_id = tree.get_root_id()
127
if root_id is not None:
128
            self._new_root = self.trans_id_tree_file_id(root_id)
        else:
            self._new_root = None
131
# Indicator of whether the transform has been applied
135
# Whether the target is case sensitive
136
self._case_sensitive_target = case_sensitive
137
# A counter of how many files have been renamed
138
self.rename_count = 0
141
"""Release the working tree lock, if held.
143
This is required if apply has not been invoked, but can be invoked
146
if self._tree is None:
151
def __get_root(self):
152
return self._new_root
154
root = property(__get_root)
156
    def _assign_id(self):
        """Produce a new transform id"""
        new_id = "new-%s" % self._id_number
        self._id_number += 1
        return new_id
162
def create_path(self, name, parent):
163
"""Assign a transaction id to a new path"""
164
trans_id = self._assign_id()
165
unique_add(self._new_name, trans_id, name)
166
unique_add(self._new_parent, trans_id, parent)
169
def adjust_path(self, name, parent, trans_id):
170
"""Change the path that is assigned to a transaction id."""
172
raise ValueError("Parent trans-id may not be None")
173
if trans_id == self._new_root:
175
self._new_name[trans_id] = name
176
self._new_parent[trans_id] = parent
178
def adjust_root_path(self, name, parent):
179
"""Emulate moving the root by moving all children, instead.
181
We do this by undoing the association of root's transaction id with the
182
current tree. This allows us to create a new directory with that
183
transaction id. We unversion the root directory and version the
184
physically new directory, and hope someone versions the tree root
187
old_root = self._new_root
188
old_root_file_id = self.final_file_id(old_root)
189
# force moving all children of root
190
for child_id in self.iter_tree_children(old_root):
191
if child_id != parent:
192
self.adjust_path(self.final_name(child_id),
193
self.final_parent(child_id), child_id)
194
file_id = self.final_file_id(child_id)
195
if file_id is not None:
196
self.unversion_file(child_id)
197
self.version_file(file_id, child_id)
199
# the physical root needs a new transaction id
200
self._tree_path_ids.pop("")
201
self._tree_id_paths.pop(old_root)
202
self._new_root = self.trans_id_tree_file_id(self._tree.get_root_id())
203
if parent == old_root:
204
parent = self._new_root
205
self.adjust_path(name, parent, old_root)
206
self.create_directory(old_root)
207
self.version_file(old_root_file_id, old_root)
208
self.unversion_file(self._new_root)
210
def fixup_new_roots(self):
211
"""Reinterpret requests to change the root directory
213
Instead of creating a root directory, or moving an existing directory,
214
all the attributes and children of the new root are applied to the
215
existing root directory.
217
This means that the old root trans-id becomes obsolete, so it is
218
recommended only to invoke this after the root trans-id has become
221
new_roots = [k for k, v in self._new_parent.iteritems() if v is
223
if len(new_roots) < 1:
225
if len(new_roots) != 1:
226
raise ValueError('A tree cannot have two roots!')
227
if self._new_root is None:
228
self._new_root = new_roots[0]
230
old_new_root = new_roots[0]
231
        # TODO: What to do if an old_new_root is present, but self._new_root is
        #       not listed as being removed? This code explicitly unversions
        #       the old root and versions it with the new file_id. Though that
        #       seems like an incomplete delta
236
# unversion the new root's directory.
237
file_id = self.final_file_id(old_new_root)
238
if old_new_root in self._new_id:
239
self.cancel_versioning(old_new_root)
241
self.unversion_file(old_new_root)
242
# if, at this stage, root still has an old file_id, zap it so we can
243
# stick a new one in.
244
if (self.tree_file_id(self._new_root) is not None and
245
self._new_root not in self._removed_id):
246
self.unversion_file(self._new_root)
247
self.version_file(file_id, self._new_root)
249
        # Now move children of new root into old root directory.
        # Ensure all children are registered with the transaction, but don't
        # use the results directly, because some tree children have new parents.
        list(self.iter_tree_children(old_new_root))
        # Move all children of new root into old root directory.
        for child in self.by_parent().get(old_new_root, []):
            self.adjust_path(self.final_name(child), self._new_root, child)
257
# Ensure old_new_root has no directory.
258
if old_new_root in self._new_contents:
259
self.cancel_creation(old_new_root)
261
self.delete_contents(old_new_root)
263
# prevent deletion of root directory.
264
if self._new_root in self._removed_contents:
265
self.cancel_deletion(self._new_root)
267
# destroy path info for old_new_root.
268
del self._new_parent[old_new_root]
269
del self._new_name[old_new_root]
271
def trans_id_tree_file_id(self, inventory_id):
272
"""Determine the transaction id of a working tree file.
274
This reflects only files that already exist, not ones that will be
275
added by transactions.
277
if inventory_id is None:
278
raise ValueError('None is not a valid file id')
279
path = self._tree.id2path(inventory_id)
280
return self.trans_id_tree_path(path)
282
def trans_id_file_id(self, file_id):
283
"""Determine or set the transaction id associated with a file ID.
284
A new id is only created for file_ids that were never present. If
285
a transaction has been unversioned, it is deliberately still returned.
286
(this will likely lead to an unversioned parent conflict.)
289
raise ValueError('None is not a valid file id')
290
if file_id in self._r_new_id and self._r_new_id[file_id] is not None:
291
return self._r_new_id[file_id]
294
self._tree.iter_entries_by_dir([file_id]).next()
295
except StopIteration:
296
if file_id in self._non_present_ids:
297
return self._non_present_ids[file_id]
299
trans_id = self._assign_id()
300
self._non_present_ids[file_id] = trans_id
303
return self.trans_id_tree_file_id(file_id)
305
def trans_id_tree_path(self, path):
306
"""Determine (and maybe set) the transaction ID for a tree path."""
307
path = self.canonical_path(path)
308
if path not in self._tree_path_ids:
309
self._tree_path_ids[path] = self._assign_id()
310
self._tree_id_paths[self._tree_path_ids[path]] = path
311
return self._tree_path_ids[path]
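    # Added commentary (not in the original source): a sketch of how the
    # path <-> trans_id bookkeeping above behaves, assuming `tt` is a
    # TreeTransform over a working tree that contains "dir/file":
    #
    #   tid_a = tt.trans_id_tree_path('dir/file')
    #   tid_b = tt.trans_id_tree_path('dir/file')
    #   assert tid_a == tid_b                    # ids are stable per path
    #   assert tt._tree_id_paths[tid_a] == 'dir/file'
    #
    # A new id is only minted the first time a canonical path is seen; the
    # forward (_tree_path_ids) and reverse (_tree_id_paths) maps are kept in
    # sync by the method above.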
313
def get_tree_parent(self, trans_id):
314
"""Determine id of the parent in the tree."""
315
path = self._tree_id_paths[trans_id]
318
return self.trans_id_tree_path(os.path.dirname(path))
320
def delete_contents(self, trans_id):
321
"""Schedule the contents of a path entry for deletion"""
322
kind = self.tree_kind(trans_id)
324
self._removed_contents.add(trans_id)
326
def cancel_deletion(self, trans_id):
327
"""Cancel a scheduled deletion"""
328
self._removed_contents.remove(trans_id)
330
def unversion_file(self, trans_id):
331
"""Schedule a path entry to become unversioned"""
332
self._removed_id.add(trans_id)
334
def delete_versioned(self, trans_id):
335
"""Delete and unversion a versioned file"""
336
self.delete_contents(trans_id)
337
self.unversion_file(trans_id)
339
def set_executability(self, executability, trans_id):
340
"""Schedule setting of the 'execute' bit
341
To unschedule, set to None
343
if executability is None:
344
            del self._new_executability[trans_id]
        else:
            unique_add(self._new_executability, trans_id, executability)
348
def set_tree_reference(self, revision_id, trans_id):
349
"""Set the reference associated with a directory"""
350
unique_add(self._new_reference_revision, trans_id, revision_id)
352
def version_file(self, file_id, trans_id):
353
"""Schedule a file to become versioned."""
356
unique_add(self._new_id, trans_id, file_id)
357
unique_add(self._r_new_id, file_id, trans_id)
359
def cancel_versioning(self, trans_id):
360
"""Undo a previous versioning of a file"""
361
file_id = self._new_id[trans_id]
362
del self._new_id[trans_id]
363
del self._r_new_id[file_id]
365
def new_paths(self, filesystem_only=False):
366
"""Determine the paths of all new and changed files.
368
:param filesystem_only: if True, only calculate values for files
369
that require renames or execute bit changes.
373
stale_ids = self._needs_rename.difference(self._new_name)
374
stale_ids.difference_update(self._new_parent)
375
stale_ids.difference_update(self._new_contents)
376
stale_ids.difference_update(self._new_id)
377
needs_rename = self._needs_rename.difference(stale_ids)
378
id_sets = (needs_rename, self._new_executability)
380
id_sets = (self._new_name, self._new_parent, self._new_contents,
381
self._new_id, self._new_executability)
382
for id_set in id_sets:
383
new_ids.update(id_set)
384
return sorted(FinalPaths(self).get_paths(new_ids))
386
def _inventory_altered(self):
387
"""Get the trans_ids and paths of files needing new inv entries."""
389
for id_set in [self._new_name, self._new_parent, self._new_id,
390
self._new_executability]:
391
new_ids.update(id_set)
392
changed_kind = set(self._removed_contents)
393
changed_kind.intersection_update(self._new_contents)
394
changed_kind.difference_update(new_ids)
395
changed_kind = (t for t in changed_kind
396
if self.tree_kind(t) != self.final_kind(t))
397
new_ids.update(changed_kind)
398
return sorted(FinalPaths(self).get_paths(new_ids))
400
def final_kind(self, trans_id):
401
"""Determine the final file kind, after any changes applied.
403
:return: None if the file does not exist/has no contents. (It is
404
conceivable that a path would be created without the corresponding
405
contents insertion command)
407
if trans_id in self._new_contents:
408
return self._new_contents[trans_id]
409
elif trans_id in self._removed_contents:
412
return self.tree_kind(trans_id)
414
def tree_file_id(self, trans_id):
415
"""Determine the file id associated with the trans_id in the tree"""
417
path = self._tree_id_paths[trans_id]
419
# the file is a new, unversioned file, or invalid trans_id
421
# the file is old; the old id is still valid
422
if self._new_root == trans_id:
423
return self._tree.get_root_id()
424
return self._tree.path2id(path)
426
def final_file_id(self, trans_id):
427
"""Determine the file id after any changes are applied, or None.
429
None indicates that the file will not be versioned after changes are
433
return self._new_id[trans_id]
435
if trans_id in self._removed_id:
437
return self.tree_file_id(trans_id)
439
def inactive_file_id(self, trans_id):
440
"""Return the inactive file_id associated with a transaction id.
441
That is, the one in the tree or in non_present_ids.
442
The file_id may actually be active, too.
444
file_id = self.tree_file_id(trans_id)
445
if file_id is not None:
447
for key, value in self._non_present_ids.iteritems():
448
if value == trans_id:
451
def final_parent(self, trans_id):
452
"""Determine the parent file_id, after any changes are applied.
454
ROOT_PARENT is returned for the tree root.
457
return self._new_parent[trans_id]
459
return self.get_tree_parent(trans_id)
461
def final_name(self, trans_id):
462
"""Determine the final filename, after all changes are applied."""
464
return self._new_name[trans_id]
467
return os.path.basename(self._tree_id_paths[trans_id])
469
raise NoFinalPath(trans_id, self)
472
"""Return a map of parent: children for known parents.
474
Only new paths and parents of tree files with assigned ids are used.
477
items = list(self._new_parent.iteritems())
478
items.extend((t, self.final_parent(t)) for t in
479
self._tree_id_paths.keys())
480
for trans_id, parent_id in items:
481
if parent_id not in by_parent:
482
by_parent[parent_id] = set()
483
by_parent[parent_id].add(trans_id)
486
def path_changed(self, trans_id):
487
"""Return True if a trans_id's path has changed."""
488
return (trans_id in self._new_name) or (trans_id in self._new_parent)
490
def new_contents(self, trans_id):
491
return (trans_id in self._new_contents)
493
def find_conflicts(self):
494
"""Find any violations of inventory or filesystem invariants"""
495
if self._done is True:
496
raise ReusingTransform()
498
# ensure all children of all existent parents are known
499
# all children of non-existent parents are known, by definition.
500
self._add_tree_children()
501
by_parent = self.by_parent()
502
conflicts.extend(self._unversioned_parents(by_parent))
503
conflicts.extend(self._parent_loops())
504
conflicts.extend(self._duplicate_entries(by_parent))
505
conflicts.extend(self._duplicate_ids())
506
conflicts.extend(self._parent_type_conflicts(by_parent))
507
conflicts.extend(self._improper_versioning())
508
conflicts.extend(self._executability_conflicts())
509
conflicts.extend(self._overwrite_conflicts())
512
def _check_malformed(self):
513
conflicts = self.find_conflicts()
514
if len(conflicts) != 0:
515
raise MalformedTransform(conflicts=conflicts)
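    # Added commentary (not in the original source): each helper called from
    # find_conflicts() contributes tuples whose first element names the
    # conflict kind, for example:
    #   ('parent loop', trans_id)
    #   ('unversioned parent', parent_id)
    #   ('duplicate', last_trans_id, trans_id, ...)
    #   ('duplicate id', old_trans_id, trans_id)
    #   ('missing parent', parent_id) / ('non-directory parent', parent_id)
    #   ('versioning no contents', trans_id) / ('versioning bad kind', ...)
    #   ('unversioned executability', trans_id) / ('non-file executability', ...)
    #   ('overwrite', trans_id, name)
    # find_conflicts() returns the combined list, and _check_malformed()
    # raises MalformedTransform if that list is non-empty.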
517
def _add_tree_children(self):
518
"""Add all the children of all active parents to the known paths.
520
Active parents are those which gain children, and those which are
521
removed. This is a necessary first step in detecting conflicts.
523
parents = self.by_parent().keys()
524
parents.extend([t for t in self._removed_contents if
525
self.tree_kind(t) == 'directory'])
526
for trans_id in self._removed_id:
527
file_id = self.tree_file_id(trans_id)
528
if file_id is not None:
529
# XXX: This seems like something that should go via a different
531
if self._tree.inventory[file_id].kind == 'directory':
532
parents.append(trans_id)
533
elif self.tree_kind(trans_id) == 'directory':
534
parents.append(trans_id)
536
for parent_id in parents:
537
# ensure that all children are registered with the transaction
538
list(self.iter_tree_children(parent_id))
540
@deprecated_method(deprecated_in((2, 3, 0)))
541
def has_named_child(self, by_parent, parent_id, name):
542
return self._has_named_child(
543
name, parent_id, known_children=by_parent.get(parent_id, []))
545
    def _has_named_child(self, name, parent_id, known_children):
        """Does a parent already have a named child?

        :param name: The searched-for name.

        :param parent_id: The parent for which the check is made.

        :param known_children: The already known children. This should have
            been recently obtained from `self.by_parent.get(parent_id)`
            (or will be if None is passed).
        """
        if known_children is None:
            known_children = self.by_parent().get(parent_id, [])
558
for child in known_children:
559
if self.final_name(child) == name:
561
parent_path = self._tree_id_paths.get(parent_id, None)
562
if parent_path is None:
563
# No parent... no children
565
child_path = joinpath(parent_path, name)
566
child_id = self._tree_path_ids.get(child_path, None)
568
# Not known by the tree transform yet, check the filesystem
569
return osutils.lexists(self._tree.abspath(child_path))
571
raise AssertionError('child_id is missing: %s, %s, %s'
572
% (name, parent_id, child_id))
574
def _available_backup_name(self, name, target_id):
575
"""Find an available backup name.
577
:param name: The basename of the file.
579
:param target_id: The directory trans_id where the backup should
582
known_children = self.by_parent().get(target_id, [])
583
return osutils.available_backup_name(
585
lambda base: self._has_named_child(
586
base, target_id, known_children))
588
def _parent_loops(self):
589
"""No entry should be its own ancestor"""
591
for trans_id in self._new_parent:
594
while parent_id is not ROOT_PARENT:
597
parent_id = self.final_parent(parent_id)
600
if parent_id == trans_id:
601
conflicts.append(('parent loop', trans_id))
602
if parent_id in seen:
606
def _unversioned_parents(self, by_parent):
607
"""If parent directories are versioned, children must be versioned."""
609
for parent_id, children in by_parent.iteritems():
610
if parent_id is ROOT_PARENT:
612
if self.final_file_id(parent_id) is not None:
614
for child_id in children:
615
if self.final_file_id(child_id) is not None:
616
conflicts.append(('unversioned parent', parent_id))
620
def _improper_versioning(self):
621
"""Cannot version a file with no contents, or a bad type.
623
However, existing entries with no contents are okay.
626
for trans_id in self._new_id.iterkeys():
627
kind = self.final_kind(trans_id)
629
conflicts.append(('versioning no contents', trans_id))
631
if not inventory.InventoryEntry.versionable_kind(kind):
632
conflicts.append(('versioning bad kind', trans_id, kind))
635
def _executability_conflicts(self):
636
"""Check for bad executability changes.
638
        Only versioned files may have their executability set, because
        1. only versioned entries can have executability under Windows
        2. only files can be executable. (The execute bit on a directory
           does not indicate searchability)
644
for trans_id in self._new_executability:
645
if self.final_file_id(trans_id) is None:
646
conflicts.append(('unversioned executability', trans_id))
648
if self.final_kind(trans_id) != "file":
649
conflicts.append(('non-file executability', trans_id))
652
def _overwrite_conflicts(self):
653
"""Check for overwrites (not permitted on Win32)"""
655
for trans_id in self._new_contents:
656
if self.tree_kind(trans_id) is None:
658
if trans_id not in self._removed_contents:
659
conflicts.append(('overwrite', trans_id,
660
self.final_name(trans_id)))
663
def _duplicate_entries(self, by_parent):
664
"""No directory may have two entries with the same name."""
666
if (self._new_name, self._new_parent) == ({}, {}):
668
for children in by_parent.itervalues():
670
for child_tid in children:
671
name = self.final_name(child_tid)
673
# Keep children only if they still exist in the end
674
if not self._case_sensitive_target:
676
name_ids.append((name, child_tid))
680
for name, trans_id in name_ids:
681
kind = self.final_kind(trans_id)
682
file_id = self.final_file_id(trans_id)
683
if kind is None and file_id is None:
685
if name == last_name:
686
conflicts.append(('duplicate', last_trans_id, trans_id,
689
last_trans_id = trans_id
692
def _duplicate_ids(self):
693
"""Each inventory id may only be used once"""
695
removed_tree_ids = set((self.tree_file_id(trans_id) for trans_id in
697
all_ids = self._tree.all_file_ids()
698
active_tree_ids = all_ids.difference(removed_tree_ids)
699
for trans_id, file_id in self._new_id.iteritems():
700
if file_id in active_tree_ids:
701
old_trans_id = self.trans_id_tree_file_id(file_id)
702
conflicts.append(('duplicate id', old_trans_id, trans_id))
705
def _parent_type_conflicts(self, by_parent):
706
"""Children must have a directory parent"""
708
for parent_id, children in by_parent.iteritems():
709
if parent_id is ROOT_PARENT:
712
for child_id in children:
713
if self.final_kind(child_id) is not None:
718
            # There is at least a child, so we need an existing directory to
            # put it in.
            kind = self.final_kind(parent_id)
722
# The directory will be deleted
723
conflicts.append(('missing parent', parent_id))
724
elif kind != "directory":
725
# Meh, we need a *directory* to put something in it
726
conflicts.append(('non-directory parent', parent_id))
729
def _set_executability(self, path, trans_id):
730
"""Set the executability of versioned files """
731
if supports_executable():
732
new_executability = self._new_executability[trans_id]
733
abspath = self._tree.abspath(path)
734
current_mode = os.stat(abspath).st_mode
735
if new_executability:
738
to_mode = current_mode | (0100 & ~umask)
739
# Enable x-bit for others only if they can read it.
740
if current_mode & 0004:
741
to_mode |= 0001 & ~umask
742
if current_mode & 0040:
743
to_mode |= 0010 & ~umask
745
to_mode = current_mode & ~0111
746
os.chmod(abspath, to_mode)
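    # Added worked example (not in the original source), assuming a typical
    # umask of 022 and a current mode of 0644:
    #   to_mode = 0644 | (0100 & ~022)                   -> 0744 (owner +x)
    #   0644 & 0004 is true, so to_mode |= 0001 & ~022   -> 0745
    #   0644 & 0040 is true, so to_mode |= 0010 & ~022   -> 0755
    # i.e. execute bits are granted wherever the corresponding read bit is
    # set, minus whatever the umask forbids; clearing executability simply
    # masks off all three x bits with current_mode & ~0111.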
748
def _new_entry(self, name, parent_id, file_id):
749
"""Helper function to create a new filesystem entry."""
750
trans_id = self.create_path(name, parent_id)
751
if file_id is not None:
752
self.version_file(file_id, trans_id)
755
def new_file(self, name, parent_id, contents, file_id=None,
756
executable=None, sha1=None):
757
"""Convenience method to create files.
759
name is the name of the file to create.
760
parent_id is the transaction id of the parent directory of the file.
761
contents is an iterator of bytestrings, which will be used to produce
763
:param file_id: The inventory ID of the file, if it is to be versioned.
764
:param executable: Only valid when a file_id has been supplied.
766
trans_id = self._new_entry(name, parent_id, file_id)
767
# TODO: rather than scheduling a set_executable call,
768
# have create_file create the file with the right mode.
769
self.create_file(contents, trans_id, sha1=sha1)
770
if executable is not None:
771
self.set_executability(executable, trans_id)
774
def new_directory(self, name, parent_id, file_id=None):
775
"""Convenience method to create directories.
777
name is the name of the directory to create.
778
parent_id is the transaction id of the parent directory of the
780
file_id is the inventory ID of the directory, if it is to be versioned.
782
trans_id = self._new_entry(name, parent_id, file_id)
783
self.create_directory(trans_id)
786
def new_symlink(self, name, parent_id, target, file_id=None):
787
"""Convenience method to create symbolic link.
789
name is the name of the symlink to create.
790
parent_id is the transaction id of the parent directory of the symlink.
791
target is a bytestring of the target of the symlink.
792
file_id is the inventory ID of the file, if it is to be versioned.
794
trans_id = self._new_entry(name, parent_id, file_id)
795
self.create_symlink(target, trans_id)
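    # Added usage sketch (not in the original source): the convenience
    # creators above are typically combined, e.g. to build "dir/hello.txt"
    # in one transform (the file_ids below are hypothetical):
    #
    #   dir_id = tt.new_directory('dir', tt.root, file_id='dir-id')
    #   tt.new_file('hello.txt', dir_id, ['hello world\n'],
    #               file_id='hello-id', executable=False)
    #
    # Each call combines create_path, the matching create_* method,
    # version_file (when a file_id is supplied) and, for new_file,
    # set_executability.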
798
def new_orphan(self, trans_id, parent_id):
799
"""Schedule an item to be orphaned.
801
        When a directory is about to be removed, its children, if they are not
        versioned, are moved out of the way: they don't have a parent anymore.
804
:param trans_id: The trans_id of the existing item.
805
:param parent_id: The parent trans_id of the item.
807
raise NotImplementedError(self.new_orphan)
809
def _get_potential_orphans(self, dir_id):
810
"""Find the potential orphans in a directory.
812
A directory can't be safely deleted if there are versioned files in it.
813
If all the contained files are unversioned then they can be orphaned.
815
The 'None' return value means that the directory contains at least one
816
versioned file and should not be deleted.
818
:param dir_id: The directory trans id.
820
:return: A list of the orphan trans ids or None if at least one
821
versioned file is present.
824
# Find the potential orphans, stop if one item should be kept
825
for child_tid in self.by_parent()[dir_id]:
826
if child_tid in self._removed_contents:
827
# The child is removed as part of the transform. Since it was
828
# versioned before, it's not an orphan
830
elif self.final_file_id(child_tid) is None:
831
# The child is not versioned
832
orphans.append(child_tid)
834
# We have a versioned file here, searching for orphans is
840
def _affected_ids(self):
841
"""Return the set of transform ids affected by the transform"""
842
trans_ids = set(self._removed_id)
843
trans_ids.update(self._new_id.keys())
844
trans_ids.update(self._removed_contents)
845
trans_ids.update(self._new_contents.keys())
846
trans_ids.update(self._new_executability.keys())
847
trans_ids.update(self._new_name.keys())
848
trans_ids.update(self._new_parent.keys())
851
def _get_file_id_maps(self):
852
"""Return mapping of file_ids to trans_ids in the to and from states"""
853
trans_ids = self._affected_ids()
856
# Build up two dicts: trans_ids associated with file ids in the
857
# FROM state, vs the TO state.
858
for trans_id in trans_ids:
859
from_file_id = self.tree_file_id(trans_id)
860
if from_file_id is not None:
861
from_trans_ids[from_file_id] = trans_id
862
to_file_id = self.final_file_id(trans_id)
863
if to_file_id is not None:
864
to_trans_ids[to_file_id] = trans_id
865
return from_trans_ids, to_trans_ids
867
def _from_file_data(self, from_trans_id, from_versioned, file_id):
868
"""Get data about a file in the from (tree) state
870
Return a (name, parent, kind, executable) tuple
872
from_path = self._tree_id_paths.get(from_trans_id)
874
# get data from working tree if versioned
875
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
876
from_name = from_entry.name
877
from_parent = from_entry.parent_id
880
if from_path is None:
881
# File does not exist in FROM state
885
# File exists, but is not versioned. Have to use path-
887
from_name = os.path.basename(from_path)
888
tree_parent = self.get_tree_parent(from_trans_id)
889
from_parent = self.tree_file_id(tree_parent)
890
if from_path is not None:
891
from_kind, from_executable, from_stats = \
892
self._tree._comparison_data(from_entry, from_path)
895
from_executable = False
896
return from_name, from_parent, from_kind, from_executable
898
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
899
"""Get data about a file in the to (target) state
901
Return a (name, parent, kind, executable) tuple
903
to_name = self.final_name(to_trans_id)
904
to_kind = self.final_kind(to_trans_id)
905
to_parent = self.final_file_id(self.final_parent(to_trans_id))
906
if to_trans_id in self._new_executability:
907
to_executable = self._new_executability[to_trans_id]
908
elif to_trans_id == from_trans_id:
909
to_executable = from_executable
911
to_executable = False
912
return to_name, to_parent, to_kind, to_executable
914
def iter_changes(self):
915
"""Produce output in the same format as Tree.iter_changes.
917
Will produce nonsensical results if invoked while inventory/filesystem
918
conflicts (as reported by TreeTransform.find_conflicts()) are present.
920
This reads the Transform, but only reproduces changes involving a
921
file_id. Files that are not versioned in either of the FROM or TO
922
states are not reflected.
924
final_paths = FinalPaths(self)
925
from_trans_ids, to_trans_ids = self._get_file_id_maps()
927
# Now iterate through all active file_ids
928
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
930
from_trans_id = from_trans_ids.get(file_id)
931
# find file ids, and determine versioning state
932
if from_trans_id is None:
933
from_versioned = False
934
from_trans_id = to_trans_ids[file_id]
936
from_versioned = True
937
to_trans_id = to_trans_ids.get(file_id)
938
if to_trans_id is None:
940
to_trans_id = from_trans_id
944
from_name, from_parent, from_kind, from_executable = \
945
self._from_file_data(from_trans_id, from_versioned, file_id)
947
to_name, to_parent, to_kind, to_executable = \
948
self._to_file_data(to_trans_id, from_trans_id, from_executable)
950
if not from_versioned:
953
from_path = self._tree_id_paths.get(from_trans_id)
957
to_path = final_paths.get_path(to_trans_id)
958
if from_kind != to_kind:
960
elif to_kind in ('file', 'symlink') and (
961
to_trans_id != from_trans_id or
962
to_trans_id in self._new_contents):
964
            if (not modified and from_versioned == to_versioned and
                from_parent == to_parent and from_name == to_name and
                from_executable == to_executable):
968
results.append((file_id, (from_path, to_path), modified,
969
(from_versioned, to_versioned),
970
(from_parent, to_parent),
971
(from_name, to_name),
972
(from_kind, to_kind),
973
(from_executable, to_executable)))
974
return iter(sorted(results, key=lambda x:x[1]))
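    # Added note (not in the original source): each element yielded above is
    # a tuple of the form
    #   (file_id, (from_path, to_path), modified,
    #    (from_versioned, to_versioned), (from_parent, to_parent),
    #    (from_name, to_name), (from_kind, to_kind),
    #    (from_executable, to_executable))
    # matching Tree.iter_changes, sorted by the (from_path, to_path) pair.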
976
def get_preview_tree(self):
977
"""Return a tree representing the result of the transform.
979
The tree is a snapshot, and altering the TreeTransform will invalidate
982
return _PreviewTree(self)
984
def commit(self, branch, message, merge_parents=None, strict=False,
985
timestamp=None, timezone=None, committer=None, authors=None,
986
revprops=None, revision_id=None):
987
"""Commit the result of this TreeTransform to a branch.
989
:param branch: The branch to commit to.
990
:param message: The message to attach to the commit.
991
:param merge_parents: Additional parent revision-ids specified by
993
:param strict: If True, abort the commit if there are unversioned
995
:param timestamp: if not None, seconds-since-epoch for the time and
996
date. (May be a float.)
997
:param timezone: Optional timezone for timestamp, as an offset in
999
:param committer: Optional committer in email-id format.
1000
(e.g. "J Random Hacker <jrandom@example.com>")
1001
:param authors: Optional list of authors in email-id format.
1002
:param revprops: Optional dictionary of revision properties.
1003
:param revision_id: Optional revision id. (Specifying a revision-id
1004
may reduce performance for some non-native formats.)
1005
:return: The revision_id of the revision committed.
1007
self._check_malformed()
1009
unversioned = set(self._new_contents).difference(set(self._new_id))
1010
for trans_id in unversioned:
1011
if self.final_file_id(trans_id) is None:
1012
raise errors.StrictCommitFailed()
1014
revno, last_rev_id = branch.last_revision_info()
1015
if last_rev_id == _mod_revision.NULL_REVISION:
1016
if merge_parents is not None:
1017
raise ValueError('Cannot supply merge parents for first'
1021
parent_ids = [last_rev_id]
1022
if merge_parents is not None:
1023
parent_ids.extend(merge_parents)
1024
if self._tree.get_revision_id() != last_rev_id:
1025
raise ValueError('TreeTransform not based on branch basis: %s' %
1026
self._tree.get_revision_id())
1027
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1028
builder = branch.get_commit_builder(parent_ids,
1029
timestamp=timestamp,
1031
committer=committer,
1033
revision_id=revision_id)
1034
preview = self.get_preview_tree()
1035
list(builder.record_iter_changes(preview, last_rev_id,
1036
self.iter_changes()))
1037
builder.finish_inventory()
1038
revision_id = builder.commit(message)
1039
branch.set_last_revision_info(revno + 1, revision_id)
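    # Added usage sketch (not in the original source; `tree` and `branch` are
    # hypothetical and must share the same basis revision):
    #
    #   tt = TreeTransform(tree)
    #   try:
    #       tt.new_file('NEWS', tt.root, ['initial news\n'], file_id='news-id')
    #       rev_id = tt.commit(branch, 'add NEWS')
    #   finally:
    #       tt.finalize()
    #
    # commit() checks the transform for conflicts, records the preview tree
    # as a new revision on the branch and returns the new revision id.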
1042
def _text_parent(self, trans_id):
1043
file_id = self.tree_file_id(trans_id)
1045
if file_id is None or self._tree.kind(file_id) != 'file':
1047
except errors.NoSuchFile:
1051
def _get_parents_texts(self, trans_id):
1052
"""Get texts for compression parents of this file."""
1053
file_id = self._text_parent(trans_id)
1056
return (self._tree.get_file_text(file_id),)
1058
def _get_parents_lines(self, trans_id):
1059
"""Get lines for compression parents of this file."""
1060
file_id = self._text_parent(trans_id)
1063
return (self._tree.get_file_lines(file_id),)
1065
def serialize(self, serializer):
1066
"""Serialize this TreeTransform.
1068
:param serializer: A Serialiser like pack.ContainerSerializer.
1070
new_name = dict((k, v.encode('utf-8')) for k, v in
1071
self._new_name.items())
1072
new_executability = dict((k, int(v)) for k, v in
1073
self._new_executability.items())
1074
tree_path_ids = dict((k.encode('utf-8'), v)
1075
for k, v in self._tree_path_ids.items())
1077
'_id_number': self._id_number,
1078
'_new_name': new_name,
1079
'_new_parent': self._new_parent,
1080
'_new_executability': new_executability,
1081
'_new_id': self._new_id,
1082
'_tree_path_ids': tree_path_ids,
1083
'_removed_id': list(self._removed_id),
1084
'_removed_contents': list(self._removed_contents),
1085
'_non_present_ids': self._non_present_ids,
1087
yield serializer.bytes_record(bencode.bencode(attribs),
1089
for trans_id, kind in self._new_contents.items():
1091
lines = osutils.chunks_to_lines(
1092
self._read_file_chunks(trans_id))
1093
parents = self._get_parents_lines(trans_id)
1094
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1095
content = ''.join(mpdiff.to_patch())
1096
if kind == 'directory':
1098
if kind == 'symlink':
1099
content = self._read_symlink_target(trans_id)
1100
yield serializer.bytes_record(content, ((trans_id, kind),))
1102
def deserialize(self, records):
1103
"""Deserialize a stored TreeTransform.
1105
:param records: An iterable of (names, content) tuples, as per
1106
pack.ContainerPushParser.
1108
names, content = records.next()
1109
attribs = bencode.bdecode(content)
1110
self._id_number = attribs['_id_number']
1111
self._new_name = dict((k, v.decode('utf-8'))
1112
for k, v in attribs['_new_name'].items())
1113
self._new_parent = attribs['_new_parent']
1114
self._new_executability = dict((k, bool(v)) for k, v in
1115
attribs['_new_executability'].items())
1116
self._new_id = attribs['_new_id']
1117
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1118
self._tree_path_ids = {}
1119
self._tree_id_paths = {}
1120
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1121
path = bytepath.decode('utf-8')
1122
self._tree_path_ids[path] = trans_id
1123
self._tree_id_paths[trans_id] = path
1124
self._removed_id = set(attribs['_removed_id'])
1125
self._removed_contents = set(attribs['_removed_contents'])
1126
self._non_present_ids = attribs['_non_present_ids']
1127
for ((trans_id, kind),), content in records:
1129
mpdiff = multiparent.MultiParent.from_patch(content)
1130
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1131
self.create_file(lines, trans_id)
1132
if kind == 'directory':
1133
self.create_directory(trans_id)
1134
if kind == 'symlink':
1135
self.create_symlink(content.decode('utf-8'), trans_id)
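    # Added note (not in the original source): serialize() and deserialize()
    # are inverses.  serialize() first yields one bencoded "attribs" record
    # describing the id/path/name maps, then one record per new content item
    # (file contents as a multiparent diff, symlink targets as UTF-8 text).
    # deserialize() expects the records back in the same order, so a stored
    # transform can be rebuilt by feeding the record stream (e.g. from a
    # pack.ContainerPushParser) straight into deserialize().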
1138
class DiskTreeTransform(TreeTransformBase):
1139
"""Tree transform storing its contents on disk."""
1141
    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1153
self._limbodir = limbodir
1154
self._deletiondir = None
1155
# A mapping of transform ids to their limbo filename
1156
self._limbo_files = {}
1157
self._possibly_stale_limbo_files = set()
1158
# A mapping of transform ids to a set of the transform ids of children
1159
# that their limbo directory has
1160
self._limbo_children = {}
1161
# Map transform ids to maps of child filename to child transform id
1162
self._limbo_children_names = {}
1163
# List of transform ids that need to be renamed from limbo into place
1164
self._needs_rename = set()
1165
self._creation_mtime = None
1168
"""Release the working tree lock, if held, clean up limbo dir.
1170
This is required if apply has not been invoked, but can be invoked
1173
if self._tree is None:
1176
limbo_paths = self._limbo_files.values() + list(
1177
self._possibly_stale_limbo_files)
1178
limbo_paths = sorted(limbo_paths, reverse=True)
1179
for path in limbo_paths:
1183
if e.errno != errno.ENOENT:
1185
# XXX: warn? perhaps we just got interrupted at an
1186
# inconvenient moment, but perhaps files are disappearing
1189
delete_any(self._limbodir)
1191
# We don't especially care *why* the dir is immortal.
1192
raise ImmortalLimbo(self._limbodir)
1194
if self._deletiondir is not None:
1195
delete_any(self._deletiondir)
1197
raise errors.ImmortalPendingDeletion(self._deletiondir)
1199
TreeTransformBase.finalize(self)
1201
def _limbo_name(self, trans_id):
1202
"""Generate the limbo name of a file"""
1203
limbo_name = self._limbo_files.get(trans_id)
1204
if limbo_name is None:
1205
limbo_name = self._generate_limbo_path(trans_id)
1206
self._limbo_files[trans_id] = limbo_name
1209
def _generate_limbo_path(self, trans_id):
1210
"""Generate a limbo path using the trans_id as the relative path.
1212
This is suitable as a fallback, and when the transform should not be
1213
sensitive to the path encoding of the limbo directory.
1215
self._needs_rename.add(trans_id)
1216
return pathjoin(self._limbodir, trans_id)
1218
def adjust_path(self, name, parent, trans_id):
1219
previous_parent = self._new_parent.get(trans_id)
1220
previous_name = self._new_name.get(trans_id)
1221
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1222
if (trans_id in self._limbo_files and
1223
trans_id not in self._needs_rename):
1224
self._rename_in_limbo([trans_id])
1225
if previous_parent != parent:
1226
self._limbo_children[previous_parent].remove(trans_id)
1227
if previous_parent != parent or previous_name != name:
1228
del self._limbo_children_names[previous_parent][previous_name]
1230
def _rename_in_limbo(self, trans_ids):
1231
"""Fix limbo names so that the right final path is produced.
1233
This means we outsmarted ourselves-- we tried to avoid renaming
1234
these files later by creating them with their final names in their
1235
final parents. But now the previous name or parent is no longer
1236
suitable, so we have to rename them.
1238
Even for trans_ids that have no new contents, we must remove their
1239
entries from _limbo_files, because they are now stale.
1241
for trans_id in trans_ids:
1242
old_path = self._limbo_files[trans_id]
1243
self._possibly_stale_limbo_files.add(old_path)
1244
del self._limbo_files[trans_id]
1245
if trans_id not in self._new_contents:
1247
new_path = self._limbo_name(trans_id)
1248
os.rename(old_path, new_path)
1249
self._possibly_stale_limbo_files.remove(old_path)
1250
for descendant in self._limbo_descendants(trans_id):
1251
desc_path = self._limbo_files[descendant]
1252
desc_path = new_path + desc_path[len(old_path):]
1253
self._limbo_files[descendant] = desc_path
1255
def _limbo_descendants(self, trans_id):
1256
"""Return the set of trans_ids whose limbo paths descend from this."""
1257
descendants = set(self._limbo_children.get(trans_id, []))
1258
for descendant in list(descendants):
1259
descendants.update(self._limbo_descendants(descendant))
1262
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1263
"""Schedule creation of a new file.
1267
:param contents: an iterator of strings, all of which will be written
1268
to the target destination.
1269
:param trans_id: TreeTransform handle
1270
:param mode_id: If not None, force the mode of the target file to match
1271
the mode of the object referenced by mode_id.
1272
Otherwise, we will try to preserve mode bits of an existing file.
1273
:param sha1: If the sha1 of this content is already known, pass it in.
1274
We can use it to prevent future sha1 computations.
1276
name = self._limbo_name(trans_id)
1277
f = open(name, 'wb')
1279
unique_add(self._new_contents, trans_id, 'file')
1280
f.writelines(contents)
1283
self._set_mtime(name)
1284
self._set_mode(trans_id, mode_id, S_ISREG)
1285
# It is unfortunate we have to use lstat instead of fstat, but we just
1286
# used utime and chmod on the file, so we need the accurate final
1288
if sha1 is not None:
1289
self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1291
def _read_file_chunks(self, trans_id):
1292
cur_file = open(self._limbo_name(trans_id), 'rb')
1294
return cur_file.readlines()
1298
def _read_symlink_target(self, trans_id):
1299
return os.readlink(self._limbo_name(trans_id))
1301
def _set_mtime(self, path):
1302
"""All files that are created get the same mtime.
1304
This time is set by the first object to be created.
1306
if self._creation_mtime is None:
1307
self._creation_mtime = time.time()
1308
os.utime(path, (self._creation_mtime, self._creation_mtime))
1310
def create_hardlink(self, path, trans_id):
1311
"""Schedule creation of a hard link"""
1312
name = self._limbo_name(trans_id)
1316
if e.errno != errno.EPERM:
1318
raise errors.HardLinkNotSupported(path)
1320
unique_add(self._new_contents, trans_id, 'file')
1322
            # Clean up the file; it never got registered, so
            # TreeTransform.finalize() won't clean it up.
1327
def create_directory(self, trans_id):
1328
"""Schedule creation of a new directory.
1330
See also new_directory.
1332
os.mkdir(self._limbo_name(trans_id))
1333
unique_add(self._new_contents, trans_id, 'directory')
1335
def create_symlink(self, target, trans_id):
1336
"""Schedule creation of a new symbolic link.
1338
target is a bytestring.
1339
See also new_symlink.
1342
os.symlink(target, self._limbo_name(trans_id))
1343
unique_add(self._new_contents, trans_id, 'symlink')
1346
path = FinalPaths(self).get_path(trans_id)
1349
raise UnableCreateSymlink(path=path)
1351
def cancel_creation(self, trans_id):
1352
"""Cancel the creation of new file contents."""
1353
del self._new_contents[trans_id]
1354
if trans_id in self._observed_sha1s:
1355
del self._observed_sha1s[trans_id]
1356
children = self._limbo_children.get(trans_id)
1357
# if this is a limbo directory with children, move them before removing
1359
if children is not None:
1360
self._rename_in_limbo(children)
1361
del self._limbo_children[trans_id]
1362
del self._limbo_children_names[trans_id]
1363
delete_any(self._limbo_name(trans_id))
1365
def new_orphan(self, trans_id, parent_id):
1366
# FIXME: There is no tree config, so we use the branch one (it's weird
1367
# to define it this way as orphaning can only occur in a working tree,
1368
# but that's all we have (for now). It will find the option in
1369
# locations.conf or bazaar.conf though) -- vila 20100916
1370
conf = self._tree.branch.get_config()
1371
conf_var_name = 'bzr.transform.orphan_policy'
1372
orphan_policy = conf.get_user_option(conf_var_name)
1373
default_policy = orphaning_registry.default_key
1374
if orphan_policy is None:
1375
orphan_policy = default_policy
1376
if orphan_policy not in orphaning_registry:
1377
trace.warning('%s (from %s) is not a known policy, defaulting '
1378
'to %s' % (orphan_policy, conf_var_name, default_policy))
1379
orphan_policy = default_policy
1380
handle_orphan = orphaning_registry.get(orphan_policy)
1381
handle_orphan(self, trans_id, parent_id)
1384
class OrphaningError(errors.BzrError):
1386
    # Only bugs could lead to such an exception being seen by the user
1387
internal_error = True
1388
_fmt = "Error while orphaning %s in %s directory"
1390
def __init__(self, orphan, parent):
1391
errors.BzrError.__init__(self)
1392
self.orphan = orphan
1393
self.parent = parent
1396
class OrphaningForbidden(OrphaningError):
1398
_fmt = "Policy: %s doesn't allow creating orphans."
1400
def __init__(self, policy):
1401
errors.BzrError.__init__(self)
1402
self.policy = policy
1405
def move_orphan(tt, orphan_id, parent_id):
1406
"""See TreeTransformBase.new_orphan.
1408
    This creates a new orphan in the `bzr-orphans` dir at the root of the
    working tree.

    :param tt: The TreeTransform orphaning `trans_id`.
1413
:param orphan_id: The trans id that should be orphaned.
1415
:param parent_id: The orphan parent trans id.
1417
# Add the orphan dir if it doesn't exist
1418
orphan_dir_basename = 'bzr-orphans'
1419
od_id = tt.trans_id_tree_path(orphan_dir_basename)
1420
if tt.final_kind(od_id) is None:
1421
tt.create_directory(od_id)
1422
parent_path = tt._tree_id_paths[parent_id]
1423
# Find a name that doesn't exist yet in the orphan dir
1424
actual_name = tt.final_name(orphan_id)
1425
new_name = tt._available_backup_name(actual_name, od_id)
1426
tt.adjust_path(new_name, od_id, orphan_id)
1427
trace.warning('%s has been orphaned in %s'
1428
% (joinpath(parent_path, actual_name), orphan_dir_basename))
1431
def refuse_orphan(tt, orphan_id, parent_id):
1432
"""See TreeTransformBase.new_orphan.
1434
    This refuses to create an orphan, letting the caller handle the conflict.
1436
raise OrphaningForbidden('never')
1439
orphaning_registry = registry.Registry()
1440
orphaning_registry.register(
1441
'conflict', refuse_orphan,
1442
'Leave orphans in place and create a conflict on the directory.')
1443
orphaning_registry.register(
1444
'move', move_orphan,
1445
'Move orphans into the bzr-orphans directory.')
1446
orphaning_registry._set_default_key('conflict')
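# Added sketch (not in the original source): a plugin could register an extra
# policy; the callable receives the same arguments as move_orphan and
# refuse_orphan above.  The policy name and behaviour below are hypothetical,
# and keeping the resulting transform conflict-free is up to the policy.
#
#   def ignore_orphan(tt, orphan_id, parent_id):
#       """Leave the orphan where it is and do nothing."""
#
#   orphaning_registry.register(
#       'ignore', ignore_orphan,
#       'Leave orphans in place without creating a conflict.')
#
# It would then be selectable through the 'bzr.transform.orphan_policy'
# option read in DiskTreeTransform.new_orphan.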
1449
class TreeTransform(DiskTreeTransform):
1450
"""Represent a tree transformation.
1452
    This object is designed to support incremental generation of the transform,
    in any order.

    However, it gives optimum performance when parent directories are created
1456
before their contents. The transform is then able to put child files
1457
directly in their parent directory, avoiding later renames.
1459
It is easy to produce malformed transforms, but they are generally
1460
harmless. Attempting to apply a malformed transform will cause an
1461
exception to be raised before any modifications are made to the tree.
1463
Many kinds of malformed transforms can be corrected with the
1464
resolve_conflicts function. The remaining ones indicate programming error,
1465
such as trying to create a file with no path.
1467
    Two sets of file creation methods are supplied. Convenience methods are:
     * new_file
     * new_directory
     * new_symlink

    These are composed of the low-level methods:
     * create_path
     * create_file or create_directory or create_symlink
     * version_file
     * set_executability
1478
Transform/Transaction ids
1479
-------------------------
1480
trans_ids are temporary ids assigned to all files involved in a transform.
1481
It's possible, even common, that not all files in the Tree have trans_ids.
1483
trans_ids are used because filenames and file_ids are not good enough
1484
identifiers; filenames change, and not all files have file_ids. File-ids
1485
    are also associated with trans-ids, so that moving a file moves its
    file-id.

    trans_ids are only valid for the TreeTransform that generated them.

    Limbo
    -----
    Limbo is a temporary directory used to hold new versions of files.
1493
Files are added to limbo by create_file, create_directory, create_symlink,
1494
and their convenience variants (new_*). Files may be removed from limbo
1495
using cancel_creation. Files are renamed from limbo into their final
1496
location as part of TreeTransform.apply
1498
Limbo must be cleaned up, by either calling TreeTransform.apply or
1499
calling TreeTransform.finalize.
1501
Files are placed into limbo inside their parent directories, where
1502
possible. This reduces subsequent renames, and makes operations involving
1503
lots of files faster. This optimization is only possible if the parent
1504
directory is created *before* creating any of its children, so avoid
1505
creating children before parents, where possible.
1509
This temporary directory is used by _FileMover for storing files that are
1510
about to be deleted. In case of rollback, the files will be restored.
1511
FileMover does not delete files until it is sure that a rollback will not
1514
def __init__(self, tree, pb=None):
1515
"""Note: a tree_write lock is taken on the tree.
1517
Use TreeTransform.finalize() to release the lock (can be omitted if
1518
TreeTransform.apply() called).
1520
tree.lock_tree_write()
1523
limbodir = urlutils.local_path_from_url(
1524
tree._transport.abspath('limbo'))
1528
if e.errno == errno.EEXIST:
1529
raise ExistingLimbo(limbodir)
1530
deletiondir = urlutils.local_path_from_url(
1531
tree._transport.abspath('pending-deletion'))
1533
os.mkdir(deletiondir)
1535
if e.errno == errno.EEXIST:
1536
raise errors.ExistingPendingDeletion(deletiondir)
1541
# Cache of realpath results, to speed up canonical_path
1542
self._realpaths = {}
1543
# Cache of relpath results, to speed up canonical_path
1545
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1546
tree.case_sensitive)
1547
self._deletiondir = deletiondir
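    # Added usage sketch (not in the original source): typical lifecycle over
    # a hypothetical working tree `wt` that contains "hello.txt":
    #
    #   tt = TreeTransform(wt)            # takes a tree_write lock on wt
    #   try:
    #       tid = tt.trans_id_tree_path('hello.txt')
    #       tt.delete_contents(tid)
    #       tt.create_file(['new text\n'], tid)
    #       tt.apply()                    # renames limbo files into place
    #   finally:
    #       tt.finalize()                 # no-op if apply() already cleaned up
    #
    # finalize() removes the limbo and pending-deletion directories and
    # releases the lock when apply() was never reached.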
1549
def canonical_path(self, path):
1550
"""Get the canonical tree-relative path"""
1551
# don't follow final symlinks
1552
abs = self._tree.abspath(path)
1553
if abs in self._relpaths:
1554
return self._relpaths[abs]
1555
dirname, basename = os.path.split(abs)
1556
if dirname not in self._realpaths:
1557
self._realpaths[dirname] = os.path.realpath(dirname)
1558
dirname = self._realpaths[dirname]
1559
abs = pathjoin(dirname, basename)
1560
if dirname in self._relpaths:
1561
relpath = pathjoin(self._relpaths[dirname], basename)
1562
relpath = relpath.rstrip('/\\')
1564
relpath = self._tree.relpath(abs)
1565
self._relpaths[abs] = relpath
1568
def tree_kind(self, trans_id):
1569
"""Determine the file kind in the working tree.
1571
:returns: The file kind or None if the file does not exist
1573
path = self._tree_id_paths.get(trans_id)
1577
return file_kind(self._tree.abspath(path))
1578
except errors.NoSuchFile:
1581
def _set_mode(self, trans_id, mode_id, typefunc):
1582
"""Set the mode of new file contents.
1583
The mode_id is the existing file to get the mode from (often the same
1584
as trans_id). The operation is only performed if there's a mode match
1585
according to typefunc.
1590
old_path = self._tree_id_paths[mode_id]
1594
mode = os.stat(self._tree.abspath(old_path)).st_mode
1596
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1597
# Either old_path doesn't exist, or the parent of the
1598
# target is not a directory (but will be one eventually)
1599
# Either way, we know it doesn't exist *right now*
1600
# See also bug #248448
1605
os.chmod(self._limbo_name(trans_id), mode)
1607
def iter_tree_children(self, parent_id):
1608
"""Iterate through the entry's tree children, if any"""
1610
path = self._tree_id_paths[parent_id]
1614
children = os.listdir(self._tree.abspath(path))
1616
if not (osutils._is_error_enotdir(e)
1617
or e.errno in (errno.ENOENT, errno.ESRCH)):
1621
for child in children:
1622
childpath = joinpath(path, child)
1623
if self._tree.is_control_filename(childpath):
1625
yield self.trans_id_tree_path(childpath)
1627
def _generate_limbo_path(self, trans_id):
1628
"""Generate a limbo path using the final path if possible.
1630
This optimizes the performance of applying the tree transform by
1631
avoiding renames. These renames can be avoided only when the parent
1632
directory is already scheduled for creation.
1634
If the final path cannot be used, falls back to using the trans_id as
1637
parent = self._new_parent.get(trans_id)
1638
        # if the parent directory is already in limbo (e.g. when building a
        # tree), choose a limbo name inside the parent, to reduce further
        # renames.
        use_direct_path = False
1642
if self._new_contents.get(parent) == 'directory':
1643
filename = self._new_name.get(trans_id)
1644
if filename is not None:
1645
if parent not in self._limbo_children:
1646
self._limbo_children[parent] = set()
1647
self._limbo_children_names[parent] = {}
1648
use_direct_path = True
1649
# the direct path can only be used if no other file has
1650
# already taken this pathname, i.e. if the name is unused, or
1651
# if it is already associated with this trans_id.
1652
elif self._case_sensitive_target:
1653
if (self._limbo_children_names[parent].get(filename)
1654
in (trans_id, None)):
1655
use_direct_path = True
1657
for l_filename, l_trans_id in\
1658
self._limbo_children_names[parent].iteritems():
1659
if l_trans_id == trans_id:
1661
if l_filename.lower() == filename.lower():
1664
use_direct_path = True
1666
if not use_direct_path:
1667
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1669
limbo_name = pathjoin(self._limbo_files[parent], filename)
1670
self._limbo_children[parent].add(trans_id)
1671
self._limbo_children_names[parent][filename] = trans_id
1675
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1676
"""Apply all changes to the inventory and filesystem.
1678
If filesystem or inventory conflicts are present, MalformedTransform
1681
If apply succeeds, finalize is not necessary.
1683
:param no_conflicts: if True, the caller guarantees there are no
1684
conflicts, so no check is made.
1685
:param precomputed_delta: An inventory delta to use instead of
1687
:param _mover: Supply an alternate FileMover, for testing
1689
if not no_conflicts:
1690
self._check_malformed()
1691
child_pb = ui.ui_factory.nested_progress_bar()
1693
if precomputed_delta is None:
1694
child_pb.update('Apply phase', 0, 2)
1695
inventory_delta = self._generate_inventory_delta()
1698
inventory_delta = precomputed_delta
1701
mover = _FileMover()
1705
child_pb.update('Apply phase', 0 + offset, 2 + offset)
1706
self._apply_removals(mover)
1707
child_pb.update('Apply phase', 1 + offset, 2 + offset)
1708
modified_paths = self._apply_insertions(mover)
1713
mover.apply_deletions()
1716
self._tree.apply_inventory_delta(inventory_delta)
1717
self._apply_observed_sha1s()
1720
return _TransformResults(modified_paths, self.rename_count)
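    # Added note (not in the original source): apply() works in two passes.
    # _apply_removals walks the existing tree paths in reverse (child-to-
    # parent) order, so directories are emptied before they are deleted or
    # renamed into limbo; _apply_insertions then walks the new paths in
    # forward (parent-to-child) order, so parents exist before their children
    # are moved into place.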
1722
def _generate_inventory_delta(self):
1723
"""Generate an inventory delta for the current transform."""
1724
inventory_delta = []
1725
child_pb = ui.ui_factory.nested_progress_bar()
1726
new_paths = self._inventory_altered()
1727
total_entries = len(new_paths) + len(self._removed_id)
1729
for num, trans_id in enumerate(self._removed_id):
1731
child_pb.update('removing file', num, total_entries)
1732
if trans_id == self._new_root:
1733
file_id = self._tree.get_root_id()
1735
file_id = self.tree_file_id(trans_id)
1736
# File-id isn't really being deleted, just moved
1737
if file_id in self._r_new_id:
1739
path = self._tree_id_paths[trans_id]
1740
inventory_delta.append((path, None, file_id, None))
1741
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1743
entries = self._tree.iter_entries_by_dir(
1744
new_path_file_ids.values())
1745
old_paths = dict((e.file_id, p) for p, e in entries)
1747
for num, (path, trans_id) in enumerate(new_paths):
1749
child_pb.update('adding file',
1750
num + len(self._removed_id), total_entries)
1751
file_id = new_path_file_ids[trans_id]
1755
kind = self.final_kind(trans_id)
1757
kind = self._tree.stored_kind(file_id)
1758
parent_trans_id = self.final_parent(trans_id)
1759
parent_file_id = new_path_file_ids.get(parent_trans_id)
1760
if parent_file_id is None:
1761
parent_file_id = self.final_file_id(parent_trans_id)
1762
if trans_id in self._new_reference_revision:
1763
new_entry = inventory.TreeReference(
1765
self._new_name[trans_id],
1766
self.final_file_id(self._new_parent[trans_id]),
1767
None, self._new_reference_revision[trans_id])
1769
new_entry = inventory.make_entry(kind,
1770
self.final_name(trans_id),
1771
parent_file_id, file_id)
1772
old_path = old_paths.get(new_entry.file_id)
1773
new_executability = self._new_executability.get(trans_id)
1774
if new_executability is not None:
1775
new_entry.executable = new_executability
1776
inventory_delta.append(
1777
(old_path, path, new_entry.file_id, new_entry))
1780
return inventory_delta
1782
def _apply_removals(self, mover):
1783
"""Perform tree operations that remove directory/inventory names.
1785
That is, delete files that are to be deleted, and put any files that
1786
        need renaming into limbo. This must be done in strict child-to-parent
        order.

        If inventory_delta is None, no inventory delta generation is performed.
1791
tree_paths = list(self._tree_path_ids.iteritems())
1792
tree_paths.sort(reverse=True)
1793
child_pb = ui.ui_factory.nested_progress_bar()
1795
for num, data in enumerate(tree_paths):
1796
path, trans_id = data
1797
child_pb.update('removing file', num, len(tree_paths))
1798
full_path = self._tree.abspath(path)
1799
if trans_id in self._removed_contents:
1800
delete_path = os.path.join(self._deletiondir, trans_id)
1801
mover.pre_delete(full_path, delete_path)
1802
elif (trans_id in self._new_name
1803
or trans_id in self._new_parent):
1805
mover.rename(full_path, self._limbo_name(trans_id))
1806
except errors.TransformRenameFailed, e:
1807
if e.errno != errno.ENOENT:
1810
self.rename_count += 1
1814
def _apply_insertions(self, mover):
1815
"""Perform tree operations that insert directory/inventory names.
1817
That is, create any files that need to be created, and restore from
1818
limbo any files that needed renaming. This must be done in strict
1819
parent-to-child order.
1821
If inventory_delta is None, no inventory delta is calculated, and
1822
no list of modified paths is returned.
1824
new_paths = self.new_paths(filesystem_only=True)
modified_paths = []
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1828
child_pb = ui.ui_factory.nested_progress_bar()
1830
for num, (path, trans_id) in enumerate(new_paths):
1832
child_pb.update('adding file', num, len(new_paths))
1833
full_path = self._tree.abspath(path)
1834
if trans_id in self._needs_rename:
1836
mover.rename(self._limbo_name(trans_id), full_path)
1837
except errors.TransformRenameFailed, e:
1838
# We may be renaming a dangling inventory id
1839
if e.errno != errno.ENOENT:
1842
self.rename_count += 1
1843
# TODO: if trans_id in self._observed_sha1s, we should
1844
# re-stat the final target, since ctime will be
1845
# updated by the change.
1846
if (trans_id in self._new_contents or
1847
self.path_changed(trans_id)):
1848
if trans_id in self._new_contents:
1849
modified_paths.append(full_path)
1850
if trans_id in self._new_executability:
1851
self._set_executability(path, trans_id)
1852
if trans_id in self._observed_sha1s:
1853
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1854
st = osutils.lstat(full_path)
1855
self._observed_sha1s[trans_id] = (o_sha1, st)
1858
for path, trans_id in new_paths:
1859
# new_paths includes stuff like workingtree conflicts. Only the
1860
# stuff in new_contents actually comes from limbo.
1861
if trans_id in self._limbo_files:
1862
del self._limbo_files[trans_id]
1863
self._new_contents.clear()
1864
return modified_paths
1866
def _apply_observed_sha1s(self):
1867
"""After we have finished renaming everything, update observed sha1s
1869
This has to be done after self._tree.apply_inventory_delta, otherwise
1870
it doesn't know anything about the files we are updating. Also, we want
1871
to do this as late as possible, so that most entries end up cached.
1873
# TODO: this doesn't update the stat information for directories. So
1874
# the first 'bzr status' will still need to rewrite
1875
# .bzr/checkout/dirstate. However, we at least don't need to
1876
# re-read all of the files.
1877
# TODO: If the operation took a while, we could do a time.sleep(3) here
1878
# to allow the clock to tick over and ensure we won't have any
1879
# problems. (we could observe start time, and finish time, and if
1880
# it is less than eg 10% overhead, add a sleep call.)
1881
paths = FinalPaths(self)
1882
for trans_id, observed in self._observed_sha1s.iteritems():
1883
path = paths.get_path(trans_id)
1884
# We could get the file_id, but dirstate prefers to use the path
1885
# anyway, and it is 'cheaper' to determine.
1886
# file_id = self._new_id[trans_id]
1887
self._tree._observed_sha1(None, path, observed)
1890
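# Hedged illustration only, not part of bzrlib's public surface: a minimal
# sketch of the apply() lifecycle implemented above, assuming 'wt' is an
# already-opened WorkingTree.  The helper name _example_apply_lifecycle and
# the file name/id used are hypothetical.
def _example_apply_lifecycle(wt):
    tt = TreeTransform(wt)
    try:
        # Stage a new versioned file; apply() then runs removals, insertions,
        # pending deletions, the inventory delta and observed sha1 updates.
        tt.new_file('example.txt', tt.root, ['example contents\n'],
                    'example-file-id')
        tt.apply()
    finally:
        tt.finalize()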
class TransformPreview(DiskTreeTransform):
1891
"""A TreeTransform for generating preview trees.
1893
Unlike TreeTransform, this version works when the input tree is a
1894
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1895
unversioned files in the input tree.
1898
def __init__(self, tree, pb=None, case_sensitive=True):
1900
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1901
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1903
def canonical_path(self, path):
1906
def tree_kind(self, trans_id):
1907
path = self._tree_id_paths.get(trans_id)
1910
kind = self._tree.path_content_summary(path)[0]
1911
if kind == 'missing':
1915
def _set_mode(self, trans_id, mode_id, typefunc):
1916
"""Set the mode of new file contents.
1917
The mode_id is the existing file to get the mode from (often the same
1918
as trans_id). The operation is only performed if there's a mode match
1919
according to typefunc.
1921
# is it ok to ignore this? probably
1924
def iter_tree_children(self, parent_id):
1925
"""Iterate through the entry's tree children, if any"""
1927
path = self._tree_id_paths[parent_id]
1930
file_id = self.tree_file_id(parent_id)
1933
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1934
children = getattr(entry, 'children', {})
1935
for child in children:
1936
childpath = joinpath(path, child)
1937
yield self.trans_id_tree_path(childpath)
1939
def new_orphan(self, trans_id, parent_id):
1940
raise NotImplementedError(self.new_orphan)
1943
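# Hedged illustration: previewing a change against a basis tree without
# touching any working tree, using TransformPreview above and the _PreviewTree
# defined just below.  'branch' is assumed to be an open Branch whose basis
# tree contains 'README'; the helper name is hypothetical.
def _example_preview_rename(branch):
    basis = branch.basis_tree()
    tt = TransformPreview(basis)
    try:
        trans_id = tt.trans_id_tree_path('README')
        tt.adjust_path('README.txt', tt.root, trans_id)
        preview = _PreviewTree(tt)
        # Fast path: comparing the preview against the transform's own tree.
        return list(preview.iter_changes(basis))
    finally:
        tt.finalize()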
class _PreviewTree(tree.InventoryTree):
1944
"""Partial implementation of Tree to support show_diff_trees"""
1946
def __init__(self, transform):
1947
self._transform = transform
1948
self._final_paths = FinalPaths(transform)
1949
self.__by_parent = None
1950
self._parent_ids = []
1951
self._all_children_cache = {}
1952
self._path2trans_id_cache = {}
1953
self._final_name_cache = {}
1954
self._iter_changes_cache = dict((c[0], c) for c in
1955
self._transform.iter_changes())
1957
def _content_change(self, file_id):
1958
"""Return True if the content of this file changed"""
1959
changes = self._iter_changes_cache.get(file_id)
1960
# changes[2] is true if the file content changed. See
1961
# InterTree.iter_changes.
1962
return (changes is not None and changes[2])
1964
def _get_repository(self):
1965
repo = getattr(self._transform._tree, '_repository', None)
1967
repo = self._transform._tree.branch.repository
1970
def _iter_parent_trees(self):
1971
for revision_id in self.get_parent_ids():
1973
yield self.revision_tree(revision_id)
1974
except errors.NoSuchRevisionInTree:
1975
yield self._get_repository().revision_tree(revision_id)
1977
def _get_file_revision(self, file_id, vf, tree_revision):
1978
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
1979
self._iter_parent_trees()]
1980
vf.add_lines((file_id, tree_revision), parent_keys,
1981
self.get_file_lines(file_id))
1982
repo = self._get_repository()
1983
base_vf = repo.texts
1984
if base_vf not in vf.fallback_versionedfiles:
1985
vf.fallback_versionedfiles.append(base_vf)
1986
return tree_revision
1988
def _stat_limbo_file(self, file_id=None, trans_id=None):
1989
if trans_id is None:
1990
trans_id = self._transform.trans_id_file_id(file_id)
1991
name = self._transform._limbo_name(trans_id)
1992
return os.lstat(name)
1995
def _by_parent(self):
1996
if self.__by_parent is None:
1997
self.__by_parent = self._transform.by_parent()
1998
return self.__by_parent
2000
def _comparison_data(self, entry, path):
2001
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2002
if kind == 'missing':
2006
file_id = self._transform.final_file_id(self._path2trans_id(path))
2007
executable = self.is_executable(file_id, path)
2008
return kind, executable, None
2010
def is_locked(self):
2013
def lock_read(self):
2014
# Perhaps in theory, this should lock the TreeTransform?
2021
def inventory(self):
2022
"""This Tree does not use inventory as its backing data."""
2023
raise NotImplementedError(_PreviewTree.inventory)
2025
def get_root_id(self):
2026
return self._transform.final_file_id(self._transform.root)
2028
def all_file_ids(self):
2029
tree_ids = set(self._transform._tree.all_file_ids())
2030
tree_ids.difference_update(self._transform.tree_file_id(t)
2031
for t in self._transform._removed_id)
2032
tree_ids.update(self._transform._new_id.values())
2036
return iter(self.all_file_ids())
2038
def _has_id(self, file_id, fallback_check):
2039
if file_id in self._transform._r_new_id:
2041
elif file_id in set([self._transform.tree_file_id(trans_id) for
2042
trans_id in self._transform._removed_id]):
2045
return fallback_check(file_id)
2047
def has_id(self, file_id):
2048
return self._has_id(file_id, self._transform._tree.has_id)
2050
def has_or_had_id(self, file_id):
2051
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2053
def _path2trans_id(self, path):
2054
# We must not use None here, because that is a valid value to store.
2055
trans_id = self._path2trans_id_cache.get(path, object)
2056
if trans_id is not object:
2058
segments = splitpath(path)
2059
cur_parent = self._transform.root
2060
for cur_segment in segments:
2061
for child in self._all_children(cur_parent):
2062
final_name = self._final_name_cache.get(child)
2063
if final_name is None:
2064
final_name = self._transform.final_name(child)
2065
self._final_name_cache[child] = final_name
2066
if final_name == cur_segment:
2070
self._path2trans_id_cache[path] = None
2072
self._path2trans_id_cache[path] = cur_parent
2075
def path2id(self, path):
2076
return self._transform.final_file_id(self._path2trans_id(path))
2078
def id2path(self, file_id):
2079
trans_id = self._transform.trans_id_file_id(file_id)
2081
return self._final_paths._determine_path(trans_id)
2083
raise errors.NoSuchId(self, file_id)
2085
def _all_children(self, trans_id):
2086
children = self._all_children_cache.get(trans_id)
2087
if children is not None:
2089
children = set(self._transform.iter_tree_children(trans_id))
2090
# children in the _new_parent set are provided by _by_parent.
2091
children.difference_update(self._transform._new_parent.keys())
2092
children.update(self._by_parent.get(trans_id, []))
2093
self._all_children_cache[trans_id] = children
2096
def iter_children(self, file_id):
2097
trans_id = self._transform.trans_id_file_id(file_id)
2098
for child_trans_id in self._all_children(trans_id):
2099
yield self._transform.final_file_id(child_trans_id)
2102
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2103
in self._transform._tree.extras())
2104
possible_extras.update(self._transform._new_contents)
2105
possible_extras.update(self._transform._removed_id)
2106
for trans_id in possible_extras:
2107
if self._transform.final_file_id(trans_id) is None:
2108
yield self._final_paths._determine_path(trans_id)
2110
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2111
yield_parents=False):
2112
for trans_id, parent_file_id in ordered_entries:
2113
file_id = self._transform.final_file_id(trans_id)
2116
if (specific_file_ids is not None
2117
and file_id not in specific_file_ids):
2119
kind = self._transform.final_kind(trans_id)
2121
kind = self._transform._tree.stored_kind(file_id)
2122
new_entry = inventory.make_entry(
2124
self._transform.final_name(trans_id),
2125
parent_file_id, file_id)
2126
yield new_entry, trans_id
2128
def _list_files_by_dir(self):
2129
todo = [ROOT_PARENT]
2131
while len(todo) > 0:
2133
parent_file_id = self._transform.final_file_id(parent)
2134
children = list(self._all_children(parent))
2135
paths = dict(zip(children, self._final_paths.get_paths(children)))
2136
children.sort(key=paths.get)
2137
todo.extend(reversed(children))
2138
for trans_id in children:
2139
ordered_ids.append((trans_id, parent_file_id))
2142
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2143
# This may not be a maximally efficient implementation, but it is
# reasonably straightforward.  An implementation that grafts the
# TreeTransform changes onto the tree's iter_entries_by_dir results
# might be more efficient, but requires tricky inferences about stack
# position.
ordered_ids = self._list_files_by_dir()
2149
for entry, trans_id in self._make_inv_entries(ordered_ids,
2150
specific_file_ids, yield_parents=yield_parents):
2151
yield unicode(self._final_paths.get_path(trans_id)), entry
2153
def _iter_entries_for_dir(self, dir_path):
2154
"""Return path, entry for items in a directory without recursing down."""
2155
dir_file_id = self.path2id(dir_path)
2157
for file_id in self.iter_children(dir_file_id):
2158
trans_id = self._transform.trans_id_file_id(file_id)
2159
ordered_ids.append((trans_id, file_id))
2160
for entry, trans_id in self._make_inv_entries(ordered_ids):
2161
yield unicode(self._final_paths.get_path(trans_id)), entry
2163
def list_files(self, include_root=False, from_dir=None, recursive=True):
2164
"""See WorkingTree.list_files."""
2165
# XXX This should behave like WorkingTree.list_files, but is really
2166
# more like RevisionTree.list_files.
2170
prefix = from_dir + '/'
2171
entries = self.iter_entries_by_dir()
2172
for path, entry in entries:
2173
if entry.name == '' and not include_root:
2176
if not path.startswith(prefix):
2178
path = path[len(prefix):]
2179
yield path, 'V', entry.kind, entry.file_id, entry
2181
if from_dir is None and include_root is True:
2182
root_entry = inventory.make_entry('directory', '',
2183
ROOT_PARENT, self.get_root_id())
2184
yield '', 'V', 'directory', root_entry.file_id, root_entry
2185
entries = self._iter_entries_for_dir(from_dir or '')
2186
for path, entry in entries:
2187
yield path, 'V', entry.kind, entry.file_id, entry
2189
def kind(self, file_id):
2190
trans_id = self._transform.trans_id_file_id(file_id)
2191
return self._transform.final_kind(trans_id)
2193
def stored_kind(self, file_id):
2194
trans_id = self._transform.trans_id_file_id(file_id)
2196
return self._transform._new_contents[trans_id]
2198
return self._transform._tree.stored_kind(file_id)
2200
def get_file_mtime(self, file_id, path=None):
2201
"""See Tree.get_file_mtime"""
2202
if not self._content_change(file_id):
2203
return self._transform._tree.get_file_mtime(file_id)
2204
return self._stat_limbo_file(file_id).st_mtime
2206
def _file_size(self, entry, stat_value):
2207
return self.get_file_size(entry.file_id)
2209
def get_file_size(self, file_id):
2210
"""See Tree.get_file_size"""
2211
trans_id = self._transform.trans_id_file_id(file_id)
2212
kind = self._transform.final_kind(trans_id)
2215
if trans_id in self._transform._new_contents:
2216
return self._stat_limbo_file(trans_id=trans_id).st_size
2217
if self.kind(file_id) == 'file':
2218
return self._transform._tree.get_file_size(file_id)
2222
def get_file_sha1(self, file_id, path=None, stat_value=None):
2223
trans_id = self._transform.trans_id_file_id(file_id)
2224
kind = self._transform._new_contents.get(trans_id)
2226
return self._transform._tree.get_file_sha1(file_id)
2228
fileobj = self.get_file(file_id)
2230
return sha_file(fileobj)
2234
def is_executable(self, file_id, path=None):
2237
trans_id = self._transform.trans_id_file_id(file_id)
2239
return self._transform._new_executability[trans_id]
2242
return self._transform._tree.is_executable(file_id, path)
2244
if e.errno == errno.ENOENT:
2247
except errors.NoSuchId:
2250
def has_filename(self, path):
2251
trans_id = self._path2trans_id(path)
2252
if trans_id in self._transform._new_contents:
2254
elif trans_id in self._transform._removed_contents:
2257
return self._transform._tree.has_filename(path)
2259
def path_content_summary(self, path):
2260
trans_id = self._path2trans_id(path)
2261
tt = self._transform
2262
tree_path = tt._tree_id_paths.get(trans_id)
2263
kind = tt._new_contents.get(trans_id)
2265
if tree_path is None or trans_id in tt._removed_contents:
2266
return 'missing', None, None, None
2267
summary = tt._tree.path_content_summary(tree_path)
2268
kind, size, executable, link_or_sha1 = summary
2271
limbo_name = tt._limbo_name(trans_id)
2272
if trans_id in tt._new_reference_revision:
2273
kind = 'tree-reference'
2275
statval = os.lstat(limbo_name)
2276
size = statval.st_size
2277
if not supports_executable():
2280
executable = statval.st_mode & S_IEXEC
2284
if kind == 'symlink':
2285
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2286
executable = tt._new_executability.get(trans_id, executable)
2287
return kind, size, executable, link_or_sha1
2289
def iter_changes(self, from_tree, include_unchanged=False,
2290
specific_files=None, pb=None, extra_trees=None,
2291
require_versioned=True, want_unversioned=False):
2292
"""See InterTree.iter_changes.
2294
This has a fast path that is only used when the from_tree matches
2295
the transform tree, and no fancy options are supplied.
2297
if (from_tree is not self._transform._tree or include_unchanged or
2298
specific_files or want_unversioned):
2299
return tree.InterTree(from_tree, self).iter_changes(
2300
include_unchanged=include_unchanged,
2301
specific_files=specific_files,
2303
extra_trees=extra_trees,
2304
require_versioned=require_versioned,
2305
want_unversioned=want_unversioned)
2306
if want_unversioned:
2307
raise ValueError('want_unversioned is not supported')
2308
return self._transform.iter_changes()
2310
def get_file(self, file_id, path=None):
2311
"""See Tree.get_file"""
2312
if not self._content_change(file_id):
2313
return self._transform._tree.get_file(file_id, path)
2314
trans_id = self._transform.trans_id_file_id(file_id)
2315
name = self._transform._limbo_name(trans_id)
2316
return open(name, 'rb')
2318
def get_file_with_stat(self, file_id, path=None):
2319
return self.get_file(file_id, path), None
2321
def annotate_iter(self, file_id,
2322
default_revision=_mod_revision.CURRENT_REVISION):
2323
changes = self._iter_changes_cache.get(file_id)
2327
changed_content, versioned, kind = (changes[2], changes[3],
2331
get_old = (kind[0] == 'file' and versioned[0])
2333
old_annotation = self._transform._tree.annotate_iter(file_id,
2334
default_revision=default_revision)
2338
return old_annotation
2339
if not changed_content:
2340
return old_annotation
2341
# TODO: This is doing something similar to what WT.annotate_iter is
#       doing; however, it fails slightly because it doesn't know what
#       the *other* revision_id is, so it doesn't know how to give the
#       other as the origin for some lines, and they all get
#       'default_revision'.
2346
# It would be nice to be able to use the new Annotator based
2347
# approach, as well.
2348
return annotate.reannotate([old_annotation],
2349
self.get_file(file_id).readlines(),
2352
def get_symlink_target(self, file_id, path=None):
2353
"""See Tree.get_symlink_target"""
2354
if not self._content_change(file_id):
2355
return self._transform._tree.get_symlink_target(file_id)
2356
trans_id = self._transform.trans_id_file_id(file_id)
2357
name = self._transform._limbo_name(trans_id)
2358
return osutils.readlink(name)
2360
def walkdirs(self, prefix=''):
2361
pending = [self._transform.root]
2362
while len(pending) > 0:
2363
parent_id = pending.pop()
2366
prefix = prefix.rstrip('/')
2367
parent_path = self._final_paths.get_path(parent_id)
2368
parent_file_id = self._transform.final_file_id(parent_id)
2369
for child_id in self._all_children(parent_id):
2370
path_from_root = self._final_paths.get_path(child_id)
2371
basename = self._transform.final_name(child_id)
2372
file_id = self._transform.final_file_id(child_id)
2373
kind = self._transform.final_kind(child_id)
2374
if kind is not None:
2375
versioned_kind = kind
2378
versioned_kind = self._transform._tree.stored_kind(file_id)
2379
if versioned_kind == 'directory':
2380
subdirs.append(child_id)
2381
children.append((path_from_root, basename, kind, None,
2382
file_id, versioned_kind))
2384
if parent_path.startswith(prefix):
2385
yield (parent_path, parent_file_id), children
2386
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2389
def get_parent_ids(self):
2390
return self._parent_ids
2392
def set_parent_ids(self, parent_ids):
2393
self._parent_ids = parent_ids
2395
def get_revision_tree(self, revision_id):
2396
return self._transform._tree.get_revision_tree(revision_id)
2399
def joinpath(parent, child):
2400
"""Join tree-relative paths, handling the tree root specially"""
2401
if parent is None or parent == "":
2404
return pathjoin(parent, child)
2407
class FinalPaths(object):
2408
"""Make path calculation cheap by memoizing paths.
2410
The underlying tree must not be manipulated between calls, or else
2411
the results will likely be incorrect.
2413
def __init__(self, transform):
2414
object.__init__(self)
2415
self._known_paths = {}
2416
self.transform = transform
2418
def _determine_path(self, trans_id):
2419
if (trans_id == self.transform.root or trans_id == ROOT_PARENT):
2421
name = self.transform.final_name(trans_id)
2422
parent_id = self.transform.final_parent(trans_id)
2423
if parent_id == self.transform.root:
2426
return pathjoin(self.get_path(parent_id), name)
2428
def get_path(self, trans_id):
2429
"""Find the final path associated with a trans_id"""
2430
if trans_id not in self._known_paths:
2431
self._known_paths[trans_id] = self._determine_path(trans_id)
2432
return self._known_paths[trans_id]
2434
def get_paths(self, trans_ids):
2435
return [(self.get_path(t), t) for t in trans_ids]
2439
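# Hedged sketch of FinalPaths above: resolve the post-transform paths of a few
# trans_ids, letting the memoized cache absorb the repeated parent lookups.
# 'tt' and 'trans_ids' are assumed to be supplied by the caller; the helper
# name is hypothetical.
def _example_final_paths(tt, trans_ids):
    final_paths = FinalPaths(tt)
    return [final_paths.get_path(t) for t in trans_ids]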
def topology_sorted_ids(tree):
2440
"""Determine the topological order of the ids in a tree"""
2441
file_ids = list(tree)
2442
file_ids.sort(key=tree.id2path)
2446
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2447
delta_from_tree=False):
2448
"""Create working tree for a branch, using a TreeTransform.
2450
This function should be used on empty trees, having a tree root at most.
2451
(see merge and revert functionality for working with existing trees)
2453
Existing files are handled like so:
2455
- Existing bzrdirs take precedence over creating new items. They are
2456
created as '%s.diverted' % name.
2457
- Otherwise, if the content on disk matches the content we are building,
2458
it is silently replaced.
2459
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2461
:param tree: The tree to convert wt into a copy of
2462
:param wt: The working tree that files will be placed into
2463
:param accelerator_tree: A tree which can be used for retrieving file
2464
contents more quickly than tree itself, i.e. a workingtree. tree
2465
will be used for cases where accelerator_tree's content is different.
2466
:param hardlink: If true, hard-link files to accelerator_tree, where
2467
possible. accelerator_tree must implement abspath, i.e. be a
2469
:param delta_from_tree: If true, build_tree may use the input Tree to
2470
generate the inventory delta.
2472
wt.lock_tree_write()
2476
if accelerator_tree is not None:
2477
accelerator_tree.lock_read()
2479
return _build_tree(tree, wt, accelerator_tree, hardlink,
2482
if accelerator_tree is not None:
2483
accelerator_tree.unlock()
2490
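# Hedged sketch: populating an empty WorkingTree from a branch's basis
# revision via build_tree() above.  'branch' and 'wt' are assumed to be
# supplied by the caller (wt empty, as the docstring requires); the helper
# name is hypothetical.
def _example_build_tree(branch, wt):
    revision_tree = branch.basis_tree()
    revision_tree.lock_read()
    try:
        build_tree(revision_tree, wt, accelerator_tree=None, hardlink=False)
    finally:
        revision_tree.unlock()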
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2491
"""See build_tree."""
2492
for num, _unused in enumerate(wt.all_file_ids()):
2493
if num > 0: # more than just a root
2494
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2496
top_pb = ui.ui_factory.nested_progress_bar()
2497
pp = ProgressPhase("Build phase", 2, top_pb)
2498
if tree.inventory.root is not None:
2499
# This is kind of a hack: we should be altering the root
# as part of the regular tree shape diff logic.
# The conditional test here is to avoid doing an expensive
# operation (flush) every time the root id is set within the
# tree, and to avoid setting the root (and thus marking the
# tree as dirty) when we don't need to, because we use two
# different idioms here: tree interfaces and inventory
# interfaces.
2506
if wt.get_root_id() != tree.get_root_id():
2507
wt.set_root_id(tree.get_root_id())
2509
tt = TreeTransform(wt)
2513
file_trans_id[wt.get_root_id()] = \
2514
tt.trans_id_tree_file_id(wt.get_root_id())
2515
pb = ui.ui_factory.nested_progress_bar()
2517
deferred_contents = []
2519
total = len(tree.inventory)
2521
precomputed_delta = []
2523
precomputed_delta = None
2524
# Check if the tree inventory has content. If so, we populate
# existing_files with the directory content. If there are no
# entries we skip populating existing_files as it's not used.
# This improves performance and avoids unnecessary work on large
# directory trees. (#501307)
2530
existing_files = set()
2531
for dir, files in wt.walkdirs():
2532
existing_files.update(f[0] for f in files)
2533
for num, (tree_path, entry) in \
2534
enumerate(tree.inventory.iter_entries_by_dir()):
2535
pb.update("Building tree", num - len(deferred_contents), total)
2536
if entry.parent_id is None:
2539
file_id = entry.file_id
2541
precomputed_delta.append((None, tree_path, file_id, entry))
2542
if tree_path in existing_files:
2543
target_path = wt.abspath(tree_path)
2544
kind = file_kind(target_path)
2545
if kind == "directory":
2547
bzrdir.BzrDir.open(target_path)
2548
except errors.NotBranchError:
2552
if (file_id not in divert and
2553
_content_match(tree, entry, file_id, kind,
2555
tt.delete_contents(tt.trans_id_tree_path(tree_path))
2556
if kind == 'directory':
2558
parent_id = file_trans_id[entry.parent_id]
2559
if entry.kind == 'file':
2560
# We *almost* replicate new_by_entry, so that we can defer
2561
# getting the file text, and get them all at once.
2562
trans_id = tt.create_path(entry.name, parent_id)
2563
file_trans_id[file_id] = trans_id
2564
tt.version_file(file_id, trans_id)
2565
executable = tree.is_executable(file_id, tree_path)
2567
tt.set_executability(executable, trans_id)
2568
trans_data = (trans_id, tree_path, entry.text_sha1)
2569
deferred_contents.append((file_id, trans_data))
2571
file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
2574
new_trans_id = file_trans_id[file_id]
2575
old_parent = tt.trans_id_tree_path(tree_path)
2576
_reparent_children(tt, old_parent, new_trans_id)
2577
offset = num + 1 - len(deferred_contents)
2578
_create_files(tt, tree, deferred_contents, pb, offset,
2579
accelerator_tree, hardlink)
2583
divert_trans = set(file_trans_id[f] for f in divert)
2584
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2585
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2586
if len(raw_conflicts) > 0:
2587
precomputed_delta = None
2588
conflicts = cook_conflicts(raw_conflicts, tt)
2589
for conflict in conflicts:
2590
trace.warning(unicode(conflict))
2592
wt.add_conflicts(conflicts)
2593
except errors.UnsupportedOperation:
2595
result = tt.apply(no_conflicts=True,
2596
precomputed_delta=precomputed_delta)
2603
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2605
total = len(desired_files) + offset
2607
if accelerator_tree is None:
2608
new_desired_files = desired_files
2610
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2611
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2612
in iter if not (c or e[0] != e[1])]
2613
if accelerator_tree.supports_content_filtering():
2614
unchanged = [(f, p) for (f, p) in unchanged
2615
if not accelerator_tree.iter_search_rules([p]).next()]
2616
unchanged = dict(unchanged)
2617
new_desired_files = []
2619
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
2620
accelerator_path = unchanged.get(file_id)
2621
if accelerator_path is None:
2622
new_desired_files.append((file_id,
2623
(trans_id, tree_path, text_sha1)))
2625
pb.update('Adding file contents', count + offset, total)
2627
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2630
contents = accelerator_tree.get_file(file_id, accelerator_path)
2631
if wt.supports_content_filtering():
2632
filters = wt._content_filter_stack(tree_path)
2633
contents = filtered_output_bytes(contents, filters,
2634
ContentFilterContext(tree_path, tree))
2636
tt.create_file(contents, trans_id, sha1=text_sha1)
2640
except AttributeError:
2641
# after filtering, contents may no longer be file-like
2645
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2646
tree.iter_files_bytes(new_desired_files)):
2647
if wt.supports_content_filtering():
2648
filters = wt._content_filter_stack(tree_path)
2649
contents = filtered_output_bytes(contents, filters,
2650
ContentFilterContext(tree_path, tree))
2651
tt.create_file(contents, trans_id, sha1=text_sha1)
2652
pb.update('Adding file contents', count + offset, total)
2655
def _reparent_children(tt, old_parent, new_parent):
2656
for child in tt.iter_tree_children(old_parent):
2657
tt.adjust_path(tt.final_name(child), new_parent, child)
2660
def _reparent_transform_children(tt, old_parent, new_parent):
2661
by_parent = tt.by_parent()
2662
for child in by_parent[old_parent]:
2663
tt.adjust_path(tt.final_name(child), new_parent, child)
2664
return by_parent[old_parent]
2667
def _content_match(tree, entry, file_id, kind, target_path):
2668
if entry.kind != kind:
2670
if entry.kind == "directory":
2672
if entry.kind == "file":
2673
f = file(target_path, 'rb')
2675
if tree.get_file_text(file_id) == f.read():
2679
elif entry.kind == "symlink":
2680
if tree.get_symlink_target(file_id) == os.readlink(target_path):
2685
def resolve_checkout(tt, conflicts, divert):
2686
new_conflicts = set()
2687
for c_type, conflict in ((c[0], c) for c in conflicts):
2688
# Anything but a 'duplicate' would indicate programmer error
2689
if c_type != 'duplicate':
2690
raise AssertionError(c_type)
2691
# Now figure out which is new and which is old
2692
if tt.new_contents(conflict[1]):
2693
new_file = conflict[1]
2694
old_file = conflict[2]
2696
new_file = conflict[2]
2697
old_file = conflict[1]
2699
# We should only get here if the conflict wasn't completely
# resolved.
final_parent = tt.final_parent(old_file)
2702
if new_file in divert:
2703
new_name = tt.final_name(old_file)+'.diverted'
2704
tt.adjust_path(new_name, final_parent, new_file)
2705
new_conflicts.add((c_type, 'Diverted to',
2706
new_file, old_file))
2708
new_name = tt.final_name(old_file)+'.moved'
2709
tt.adjust_path(new_name, final_parent, old_file)
2710
new_conflicts.add((c_type, 'Moved existing file to',
2711
old_file, new_file))
2712
return new_conflicts
2715
def new_by_entry(tt, entry, parent_id, tree):
2716
"""Create a new file according to its inventory entry"""
2720
contents = tree.get_file(entry.file_id).readlines()
2721
executable = tree.is_executable(entry.file_id)
2722
return tt.new_file(name, parent_id, contents, entry.file_id,
2724
elif kind in ('directory', 'tree-reference'):
2725
trans_id = tt.new_directory(name, parent_id, entry.file_id)
2726
if kind == 'tree-reference':
2727
tt.set_tree_reference(entry.reference_revision, trans_id)
2729
elif kind == 'symlink':
2730
target = tree.get_symlink_target(entry.file_id)
2731
return tt.new_symlink(name, parent_id, target, entry.file_id)
2733
raise errors.BadFileKindError(name, kind)
2736
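# Hedged sketch of new_by_entry() above: copy one versioned entry from a
# read-locked source tree into a transform.  'tt', 'source_tree' and 'entry'
# are assumed to be supplied by the caller; the helper name is hypothetical.
def _example_copy_entry(tt, source_tree, entry):
    parent_id = tt.trans_id_file_id(entry.parent_id)
    return new_by_entry(tt, entry, parent_id, source_tree)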
def create_from_tree(tt, trans_id, tree, file_id, bytes=None,
2737
filter_tree_path=None):
2738
"""Create new file contents according to tree contents.
2740
:param filter_tree_path: the tree path to use to lookup
2741
content filters to apply to the bytes output in the working tree.
2742
This only applies if the working tree supports content filtering.
2744
kind = tree.kind(file_id)
2745
if kind == 'directory':
2746
tt.create_directory(trans_id)
2747
elif kind == "file":
2749
tree_file = tree.get_file(file_id)
2751
bytes = tree_file.readlines()
wt = tt._tree
if wt.supports_content_filtering() and filter_tree_path is not None:
2756
filters = wt._content_filter_stack(filter_tree_path)
2757
bytes = filtered_output_bytes(bytes, filters,
2758
ContentFilterContext(filter_tree_path, tree))
2759
tt.create_file(bytes, trans_id)
2760
elif kind == "symlink":
2761
tt.create_symlink(tree.get_symlink_target(file_id), trans_id)
2763
raise AssertionError('Unknown kind %r' % kind)
2766
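# Hedged sketch of create_from_tree() above: fill in contents for an existing
# trans_id from another (read-locked) tree, e.g. while reverting a single
# file.  Arguments are assumed to be supplied by the caller; the helper name
# is hypothetical.
def _example_create_from_tree(tt, source_tree, file_id):
    trans_id = tt.trans_id_file_id(file_id)
    create_from_tree(tt, trans_id, source_tree, file_id)
    return trans_id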
def create_entry_executability(tt, entry, trans_id):
2767
"""Set the executability of a trans_id according to an inventory entry"""
2768
if entry.kind == "file":
2769
tt.set_executability(entry.executable, trans_id)
2772
@deprecated_function(deprecated_in((2, 3, 0)))
2773
def get_backup_name(entry, by_parent, parent_trans_id, tt):
2774
return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)
2777
@deprecated_function(deprecated_in((2, 3, 0)))
2778
def _get_backup_name(name, by_parent, parent_trans_id, tt):
2779
"""Produce a backup-style name that appears to be available"""
2783
yield "%s.~%d~" % (name, counter)
2785
for new_name in name_gen():
2786
if not tt.has_named_child(by_parent, parent_trans_id, new_name):
2790
def _entry_changes(file_id, entry, working_tree):
2791
"""Determine in which ways the inventory entry has changed.
2793
Returns booleans: has_contents, content_mod, meta_mod
2794
has_contents means there are currently contents, but they differ
2795
contents_mod means contents need to be modified
2796
meta_mod means the metadata needs to be modified
2798
cur_entry = working_tree.inventory[file_id]
2800
working_kind = working_tree.kind(file_id)
2803
has_contents = False
2806
if has_contents is True:
2807
if entry.kind != working_kind:
2808
contents_mod, meta_mod = True, False
2810
cur_entry._read_tree_state(working_tree.id2path(file_id),
2812
contents_mod, meta_mod = entry.detect_changes(cur_entry)
2813
cur_entry._forget_tree_state()
2814
return has_contents, contents_mod, meta_mod
2817
def revert(working_tree, target_tree, filenames, backups=False,
2818
pb=None, change_reporter=None):
2819
"""Revert a working tree's contents to those of a target tree."""
2820
target_tree.lock_read()
2821
pb = ui.ui_factory.nested_progress_bar()
2822
tt = TreeTransform(working_tree, pb)
2824
pp = ProgressPhase("Revert phase", 3, pb)
2825
conflicts, merge_modified = _prepare_revert_transform(
2826
working_tree, target_tree, tt, filenames, backups, pp)
2828
change_reporter = delta._ChangeReporter(
2829
unversioned_filter=working_tree.is_ignored)
2830
delta.report_changes(tt.iter_changes(), change_reporter)
2831
for conflict in conflicts:
2832
trace.warning(unicode(conflict))
2835
working_tree.set_merge_modified(merge_modified)
2837
target_tree.unlock()
2843
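# Hedged sketch: reverting selected paths of a working tree to its basis tree
# with revert() above.  'wt' is assumed to be an open WorkingTree and the
# listed path to exist in the basis; the helper name is hypothetical.
def _example_revert_path(wt):
    basis = wt.basis_tree()
    return revert(wt, basis, ['foo.txt'], backups=True)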
def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2844
backups, pp, basis_tree=None,
2845
merge_modified=None):
2846
child_pb = ui.ui_factory.nested_progress_bar()
2848
if merge_modified is None:
2849
merge_modified = working_tree.merge_modified()
2850
merge_modified = _alter_files(working_tree, target_tree, tt,
2851
child_pb, filenames, backups,
2852
merge_modified, basis_tree)
2855
child_pb = ui.ui_factory.nested_progress_bar()
2857
raw_conflicts = resolve_conflicts(tt, child_pb,
2858
lambda t, c: conflict_pass(t, c, target_tree))
2861
conflicts = cook_conflicts(raw_conflicts, tt)
2862
return conflicts, merge_modified
2865
def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2866
backups, merge_modified, basis_tree=None):
2867
if basis_tree is not None:
2868
basis_tree.lock_read()
2869
# We ask the working_tree for its changes relative to the target, rather
# than the target changes relative to the working tree, because WT4 has an
# optimizer to compare itself to a target, but no optimizer for the
# reverse.
change_list = working_tree.iter_changes(target_tree,
2874
specific_files=specific_files, pb=pb)
2875
if target_tree.get_root_id() is None:
2881
for id_num, (file_id, path, changed_content, versioned, parent, name,
2882
kind, executable) in enumerate(change_list):
2883
target_path, wt_path = path
2884
target_versioned, wt_versioned = versioned
2885
target_parent, wt_parent = parent
2886
target_name, wt_name = name
2887
target_kind, wt_kind = kind
2888
target_executable, wt_executable = executable
2889
if skip_root and wt_parent is None:
2891
trans_id = tt.trans_id_file_id(file_id)
2894
keep_content = False
2895
if wt_kind == 'file' and (backups or target_kind is None):
2896
wt_sha1 = working_tree.get_file_sha1(file_id)
2897
if merge_modified.get(file_id) != wt_sha1:
2898
# acquire the basis tree lazily to prevent the
2899
# expense of accessing it when it's not needed ?
2900
# (Guessing, RBC, 200702)
2901
if basis_tree is None:
2902
basis_tree = working_tree.basis_tree()
2903
basis_tree.lock_read()
2904
if file_id in basis_tree:
2905
if wt_sha1 != basis_tree.get_file_sha1(file_id):
2907
elif target_kind is None and not target_versioned:
2909
if wt_kind is not None:
2910
if not keep_content:
2911
tt.delete_contents(trans_id)
2912
elif target_kind is not None:
2913
parent_trans_id = tt.trans_id_file_id(wt_parent)
2914
backup_name = tt._available_backup_name(
2915
wt_name, parent_trans_id)
2916
tt.adjust_path(backup_name, parent_trans_id, trans_id)
2917
new_trans_id = tt.create_path(wt_name, parent_trans_id)
2918
if wt_versioned and target_versioned:
2919
tt.unversion_file(trans_id)
2920
tt.version_file(file_id, new_trans_id)
2921
# New contents should have the same unix perms as old
2924
trans_id = new_trans_id
2925
if target_kind in ('directory', 'tree-reference'):
2926
tt.create_directory(trans_id)
2927
if target_kind == 'tree-reference':
2928
revision = target_tree.get_reference_revision(file_id,
2930
tt.set_tree_reference(revision, trans_id)
2931
elif target_kind == 'symlink':
2932
tt.create_symlink(target_tree.get_symlink_target(file_id),
2934
elif target_kind == 'file':
2935
deferred_files.append((file_id, (trans_id, mode_id)))
2936
if basis_tree is None:
2937
basis_tree = working_tree.basis_tree()
2938
basis_tree.lock_read()
2939
new_sha1 = target_tree.get_file_sha1(file_id)
2940
if (file_id in basis_tree and new_sha1 ==
2941
basis_tree.get_file_sha1(file_id)):
2942
if file_id in merge_modified:
2943
del merge_modified[file_id]
2945
merge_modified[file_id] = new_sha1
2947
# preserve the execute bit when backing up
2948
if keep_content and wt_executable == target_executable:
2949
tt.set_executability(target_executable, trans_id)
2950
elif target_kind is not None:
2951
raise AssertionError(target_kind)
2952
if not wt_versioned and target_versioned:
2953
tt.version_file(file_id, trans_id)
2954
if wt_versioned and not target_versioned:
2955
tt.unversion_file(trans_id)
2956
if (target_name is not None and
2957
(wt_name != target_name or wt_parent != target_parent)):
2958
if target_name == '' and target_parent is None:
2959
parent_trans = ROOT_PARENT
2961
parent_trans = tt.trans_id_file_id(target_parent)
2962
if wt_parent is None and wt_versioned:
2963
tt.adjust_root_path(target_name, parent_trans)
2965
tt.adjust_path(target_name, parent_trans, trans_id)
2966
if wt_executable != target_executable and target_kind == "file":
2967
tt.set_executability(target_executable, trans_id)
2968
if working_tree.supports_content_filtering():
2969
for index, ((trans_id, mode_id), bytes) in enumerate(
2970
target_tree.iter_files_bytes(deferred_files)):
2971
file_id = deferred_files[index][0]
2972
# We're reverting a tree to the target tree so using the
2973
# target tree to find the file path seems the best choice
2974
# here IMO - Ian C 27/Oct/2009
2975
filter_tree_path = target_tree.id2path(file_id)
2976
filters = working_tree._content_filter_stack(filter_tree_path)
2977
bytes = filtered_output_bytes(bytes, filters,
2978
ContentFilterContext(filter_tree_path, working_tree))
2979
tt.create_file(bytes, trans_id, mode_id)
2981
for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
2983
tt.create_file(bytes, trans_id, mode_id)
2984
tt.fixup_new_roots()
2986
if basis_tree is not None:
2988
return merge_modified
2991
def resolve_conflicts(tt, pb=None, pass_func=None):
2992
"""Make many conflict-resolution attempts, but die if they fail"""
2993
if pass_func is None:
2994
pass_func = conflict_pass
2995
new_conflicts = set()
2996
pb = ui.ui_factory.nested_progress_bar()
2999
pb.update('Resolution pass', n+1, 10)
3000
conflicts = tt.find_conflicts()
3001
if len(conflicts) == 0:
3002
return new_conflicts
3003
new_conflicts.update(pass_func(tt, conflicts))
3004
raise MalformedTransform(conflicts=conflicts)
3009
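# Hedged sketch of resolve_conflicts() above paired with cook_conflicts()
# defined further below, in the style _build_tree uses: run the resolution
# passes, then turn the raw records into Conflict objects for reporting.
# 'tt' is assumed to be a transform the caller owns; the helper name is
# hypothetical.
def _example_resolve_and_cook(tt):
    raw_conflicts = resolve_conflicts(tt)
    return cook_conflicts(raw_conflicts, tt)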
def conflict_pass(tt, conflicts, path_tree=None):
3010
"""Resolve some classes of conflicts.
3012
:param tt: The transform to resolve conflicts in
3013
:param conflicts: The conflicts to resolve
3014
:param path_tree: A Tree to get supplemental paths from
3016
new_conflicts = set()
3017
for c_type, conflict in ((c[0], c) for c in conflicts):
3018
if c_type == 'duplicate id':
3019
tt.unversion_file(conflict[1])
3020
new_conflicts.add((c_type, 'Unversioned existing file',
3021
conflict[1], conflict[2], ))
3022
elif c_type == 'duplicate':
3023
# files that were renamed take precedence
3024
final_parent = tt.final_parent(conflict[1])
3025
if tt.path_changed(conflict[1]):
3026
existing_file, new_file = conflict[2], conflict[1]
3028
existing_file, new_file = conflict[1], conflict[2]
3029
new_name = tt.final_name(existing_file)+'.moved'
3030
tt.adjust_path(new_name, final_parent, existing_file)
3031
new_conflicts.add((c_type, 'Moved existing file to',
3032
existing_file, new_file))
3033
elif c_type == 'parent loop':
3034
# break the loop by undoing one of the ops that caused the loop
3036
while not tt.path_changed(cur):
3037
cur = tt.final_parent(cur)
3038
new_conflicts.add((c_type, 'Cancelled move', cur,
3039
tt.final_parent(cur),))
3040
tt.adjust_path(tt.final_name(cur), tt.get_tree_parent(cur), cur)
3042
elif c_type == 'missing parent':
3043
trans_id = conflict[1]
3044
if trans_id in tt._removed_contents:
3045
cancel_deletion = True
3046
orphans = tt._get_potential_orphans(trans_id)
3048
cancel_deletion = False
3049
# All children are orphans
3052
tt.new_orphan(o, trans_id)
3053
except OrphaningError:
3054
# Something bad happened so we cancel the directory
# deletion which will leave it in place with a
# conflict.  The user can deal with it from there.
# Note that this also catches the case where we don't
# want to create orphans and leave the directory in
# place.
cancel_deletion = True
3063
# Cancel the directory deletion
3064
tt.cancel_deletion(trans_id)
3065
new_conflicts.add(('deleting parent', 'Not deleting',
3070
tt.final_name(trans_id)
3072
if path_tree is not None:
3073
file_id = tt.final_file_id(trans_id)
3075
file_id = tt.inactive_file_id(trans_id)
3076
_, entry = path_tree.iter_entries_by_dir(
3078
# special-case the other tree root (move its
3079
# children to current root)
3080
if entry.parent_id is None:
3082
moved = _reparent_transform_children(
3083
tt, trans_id, tt.root)
3085
new_conflicts.add((c_type, 'Moved to root',
3088
parent_trans_id = tt.trans_id_file_id(
3090
tt.adjust_path(entry.name, parent_trans_id,
3093
tt.create_directory(trans_id)
3094
new_conflicts.add((c_type, 'Created directory', trans_id))
3095
elif c_type == 'unversioned parent':
3096
file_id = tt.inactive_file_id(conflict[1])
3097
# special-case the other tree root (move its children instead)
3098
if path_tree and file_id in path_tree:
3099
if path_tree.path2id('') == file_id:
3100
# This is the root entry, skip it
3102
tt.version_file(file_id, conflict[1])
3103
new_conflicts.add((c_type, 'Versioned directory', conflict[1]))
3104
elif c_type == 'non-directory parent':
3105
parent_id = conflict[1]
3106
parent_parent = tt.final_parent(parent_id)
3107
parent_name = tt.final_name(parent_id)
3108
parent_file_id = tt.final_file_id(parent_id)
3109
new_parent_id = tt.new_directory(parent_name + '.new',
3110
parent_parent, parent_file_id)
3111
_reparent_transform_children(tt, parent_id, new_parent_id)
3112
if parent_file_id is not None:
3113
tt.unversion_file(parent_id)
3114
new_conflicts.add((c_type, 'Created directory', new_parent_id))
3115
elif c_type == 'versioning no contents':
3116
tt.cancel_versioning(conflict[1])
3117
return new_conflicts
3120
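# Descriptive summary: conflict_pass() above handles 'duplicate id',
# 'duplicate', 'parent loop', 'missing parent', 'unversioned parent',
# 'non-directory parent' and 'versioning no contents' conflicts; the actions
# it takes are recorded as raw conflict tuples and turned into Conflict
# objects by cook_conflicts() below.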
def cook_conflicts(raw_conflicts, tt):
3121
"""Generate a list of cooked conflicts, sorted by file path"""
3122
from bzrlib.conflicts import Conflict
3123
conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
3124
return sorted(conflict_iter, key=Conflict.sort_key)
3127
def iter_cook_conflicts(raw_conflicts, tt):
3128
from bzrlib.conflicts import Conflict
3130
for conflict in raw_conflicts:
3131
c_type = conflict[0]
3132
action = conflict[1]
3133
modified_path = fp.get_path(conflict[2])
3134
modified_id = tt.final_file_id(conflict[2])
3135
if len(conflict) == 3:
3136
yield Conflict.factory(c_type, action=action, path=modified_path,
3137
file_id=modified_id)
3140
conflicting_path = fp.get_path(conflict[3])
3141
conflicting_id = tt.final_file_id(conflict[3])
3142
yield Conflict.factory(c_type, action=action, path=modified_path,
3143
file_id=modified_id,
3144
conflict_path=conflicting_path,
3145
conflict_file_id=conflicting_id)
3148
class _FileMover(object):
3149
"""Moves and deletes files for TreeTransform, tracking operations"""
def __init__(self):
self.past_renames = []
3153
self.pending_deletions = []
3155
def rename(self, from_, to):
3156
"""Rename a file from one path to another."""
3158
os.rename(from_, to)
3160
if e.errno in (errno.EEXIST, errno.ENOTEMPTY):
3161
raise errors.FileExists(to, str(e))
3162
# normal OSError doesn't include filenames so it's hard to see where
3163
# the problem is, see https://bugs.launchpad.net/bzr/+bug/491763
3164
raise errors.TransformRenameFailed(from_, to, str(e), e.errno)
3165
self.past_renames.append((from_, to))
3167
def pre_delete(self, from_, to):
3168
"""Rename a file out of the way and mark it for deletion.
3170
Unlike os.unlink, this works equally well for files and directories.
3171
:param from_: The current file path
3172
:param to: A temporary path for the file
3174
self.rename(from_, to)
3175
self.pending_deletions.append(to)
def rollback(self):
"""Reverse all renames that have been performed"""
3179
for from_, to in reversed(self.past_renames):
3181
os.rename(to, from_)
3183
raise errors.TransformRenameFailed(to, from_, str(e), e.errno)
3184
# after rollback, don't reuse _FileMover
3186
pending_deletions = None
3188
def apply_deletions(self):
3189
"""Apply all marked deletions"""
3190
for path in self.pending_deletions:
3192
# after apply_deletions, don't reuse _FileMover
3194
pending_deletions = None
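# Hedged sketch of the _FileMover protocol above: renames are recorded so they
# can be undone via rollback(), while deletions are staged with pre_delete()
# and only committed by apply_deletions().  The paths and the helper name are
# hypothetical.
def _example_file_mover(active_path, replacement_path, staging_path):
    mover = _FileMover()
    try:
        # Move the current file out of the way, then slot the replacement in.
        mover.pre_delete(active_path, staging_path)
        mover.rename(replacement_path, active_path)
    except (errors.FileExists, errors.TransformRenameFailed):
        mover.rollback()
        raise
    else:
        mover.apply_deletions()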