# Copyright (C) 2005, 2006, 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""WorkingTree4 format and implementation.

WorkingTree4 provides the dirstate based working tree logic.

To get a WorkingTree, call bzrdir.open_workingtree() or
WorkingTree.open(dir).
"""
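
# A minimal usage sketch of the calls named in the docstring above (added for
# illustration, not part of the original module; 'path/to/tree' is a
# placeholder path):
#
#   from bzrlib.workingtree import WorkingTree
#   wt = WorkingTree.open('path/to/tree')
#   wt.lock_read()
#   try:
#       print wt.get_parent_ids()
#   finally:
#       wt.unlock()
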
from cStringIO import StringIO
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bisect import bisect_left
from copy import deepcopy
    conflicts as _mod_conflicts,
    revision as _mod_revision,
from bzrlib.transport import get_transport
from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.inventory import InventoryEntry, Inventory, ROOT_ID, entry_factory
from bzrlib.lockable_files import LockableFiles, TransportLock
from bzrlib.lockdir import LockDir
import bzrlib.mutabletree
from bzrlib.mutabletree import needs_tree_write_lock
from bzrlib.osutils import (
from bzrlib.trace import mutter, note
from bzrlib.transport.local import LocalTransport
from bzrlib.tree import InterTree
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import NULL_REVISION, CURRENT_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (deprecated_passed,
from bzrlib.tree import Tree
from bzrlib.workingtree import WorkingTree, WorkingTree3, WorkingTreeFormat3

# This is the Windows equivalent of ENOTDIR
# It is defined in pywin32.winerror, but we don't want a strong dependency for
# just an error code.
ERROR_PATH_NOT_FOUND = 3
ERROR_DIRECTORY = 267


class WorkingTree4(WorkingTree3):
    """This is the Format 4 working tree.

    This differs from WorkingTree3 by:
     - Having a consolidated internal dirstate, stored in a
       randomly-accessible sorted file on disk.
     - Not having a regular inventory attribute. One can be synthesized
       on demand but this is expensive and should be avoided.

    This is new in bzr 0.15.
    """
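
    # Illustration only (not part of the original source): because building
    # self.inventory is expensive here, per-file lookups are better done via
    # the id/path helpers this class provides, e.g.
    #
    #   file_id = tree.path2id('doc/README')
    #   path = tree.id2path(file_id)
    #
    # rather than by walking tree.inventory.
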
    def __init__(self, basedir,
        """Construct a WorkingTree for basedir.

        If the branch is not supplied, it is opened automatically.
        If the branch is supplied, it must be the branch for this basedir.
        (branch.base is not cross checked, because for remote branches that
        would be meaningless).
        """
        self._format = _format
        self.bzrdir = _bzrdir
        from bzrlib.trace import note, mutter
        assert isinstance(basedir, basestring), \
            "base directory %r is not a string" % basedir
        basedir = safe_unicode(basedir)
        mutter("opening working tree %r", basedir)
        self._branch = branch
        assert isinstance(self.branch, bzrlib.branch.Branch), \
            "branch %r is not a Branch" % self.branch
        self.basedir = realpath(basedir)
        # if branch is at our basedir and is a format 6 or less
        # assume all other formats have their own control files.
        assert isinstance(_control_files, LockableFiles), \
            "_control_files must be a LockableFiles, not %r" % _control_files
        self._control_files = _control_files
        # during a read or write lock these objects are set, and are
        # None the rest of the time.
        self._dirstate = None
        self._inventory = None

    @needs_tree_write_lock
    def _add(self, files, ids, kinds):
        """See MutableTree._add."""
        state = self.current_dirstate()
        for f, file_id, kind in zip(files, ids, kinds):
            # special case tree root handling.
            if f == '' and self.path2id(f) == ROOT_ID:
                state.set_path_id('', generate_ids.gen_file_id(f))
                file_id = generate_ids.gen_file_id(f)
            # deliberately add the file with no cached stat or sha1
            # - on the first access it will be gathered, and we can
            # always change this once tests are all passing.
            state.add(f, file_id, kind, None, '')
        self._make_dirty(reset_inventory=True)

    def _make_dirty(self, reset_inventory):
        """Make the tree state dirty.

        :param reset_inventory: True if the cached inventory should be removed
            (presuming there is one).
        """
        if reset_inventory and self._inventory is not None:
            self._inventory = None

    @needs_tree_write_lock
    def add_reference(self, sub_tree):
        # use standard implementation, which calls back to self._add
        # So we don't store the reference_revision in the working dirstate,
        # it's just recorded at the moment of commit.
        self._add_reference(sub_tree)

    def break_lock(self):
        """Break a lock if one is present from another instance.

        Uses the ui factory to ask for confirmation if the lock may be from

        This will probe the repository for its lock as well.
        """
        # if the dirstate is locked by an active process, reject the break lock
        if self._dirstate is None:
        state = self._current_dirstate()
        if state._lock_token is not None:
            # we already have it locked. Sheesh, can't break our own lock.
            raise errors.LockActive(self.basedir)
        # try for a write lock - need permission to get one anyhow
        except errors.LockContention:
            # oslocks fail when a process is still live: fail.
            # TODO: get the locked lockdir info and give to the user to
            # assist in debugging.
            raise errors.LockActive(self.basedir)
        self._dirstate = None
        self._control_files.break_lock()
        self.branch.break_lock()

    def _comparison_data(self, entry, path):
        kind, executable, stat_value = \
            WorkingTree3._comparison_data(self, entry, path)
        # it looks like a plain directory, but it's really a reference -- see
        if (self._repo_supports_tree_reference and
            kind == 'directory' and
            self._directory_is_tree_reference(path)):
            kind = 'tree-reference'
        return kind, executable, stat_value

    def commit(self, message=None, revprops=None, *args, **kwargs):
        # mark the tree as dirty post commit - commit
        # can change the current versioned list by doing deletes.
        result = WorkingTree3.commit(self, message, revprops, *args, **kwargs)
        self._make_dirty(reset_inventory=True)

    def current_dirstate(self):
        """Return the current dirstate object.

        This is not part of the tree interface and only exposed for ease of

        :raises errors.NotWriteLocked: when not in a lock.
        """
        self._must_be_locked()
        return self._current_dirstate()

    def _current_dirstate(self):
        """Internal function that does not check lock status.

        This is needed for break_lock which also needs the dirstate.
        """
        if self._dirstate is not None:
            return self._dirstate
        local_path = self.bzrdir.get_workingtree_transport(None
            ).local_abspath('dirstate')
        self._dirstate = dirstate.DirState.on_file(local_path)
        return self._dirstate

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.isdir(self.abspath(relpath) + u"/.bzr")
        # TODO: We could ask all the control formats whether they
        # recognize this directory, but at the moment there's no cheap api
        # to do that. Since we probably can only nest bzr checkouts and
        # they always use this name it's ok for now. -- mbp 20060306

        # FIXME: There is an unhandled case here of a subdirectory
        # containing .bzr but not a branch; that will probably blow up
        # when you try to commit it. It might happen if there is a
        # checkout in a subdirectory. This can be avoided by not adding

    def filter_unversioned_files(self, paths):
        """Filter out paths that are versioned.

        :return: set of paths.
        """
        # TODO: make a generic multi-bisect routine roughly that should list
        # the paths, then process one half at a time recursively, and feed the
        # results of each bisect in further still
        paths = sorted(paths)
        state = self.current_dirstate()
        # TODO we want a paths_to_dirblocks helper I think
            dirname, basename = os.path.split(path.encode('utf8'))
            _, _, _, path_is_versioned = state._get_block_entry_index(
                dirname, basename, 0)
            if not path_is_versioned:

        """Write all cached data to disk."""
        if self._control_files._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        self.current_dirstate().save()
        self._inventory = None

    @needs_tree_write_lock
    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        for pos, f in enumerate(files):
            if kinds[pos] is None:
                kinds[pos] = self._kind(f)

    def _generate_inventory(self):
        """Create and set self.inventory from the dirstate object.

        This is relatively expensive: we have to walk the entire dirstate.
        Ideally we would not, and can deprecate this function.
        """
        #: uncomment to trap on inventory requests.
        # import pdb;pdb.set_trace()
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        root_key, current_entry = self._get_entry(path='')
        current_id = root_key[2]
        assert current_entry[0][0] == 'd' # directory
        inv = Inventory(root_id=current_id)
        # Turn some things into local variables
        minikind_to_kind = dirstate.DirState._minikind_to_kind
        factory = entry_factory
        utf8_decode = cache_utf8._utf8_decode
        # we could do this straight out of the dirstate; it might be fast
        # and should be profiled - RBC 20070216
        parent_ies = {'' : inv.root}
        for block in state._dirblocks[1:]: # skip the root
            parent_ie = parent_ies[dirname]
                # all the paths in this block are not versioned in this tree
            for key, entry in block[1]:
                minikind, link_or_sha1, size, executable, stat = entry[0]
                if minikind in ('a', 'r'): # absent, relocated
                    # a parent tree only entry
                name_unicode = utf8_decode(name)[0]
                kind = minikind_to_kind[minikind]
                inv_entry = factory[kind](file_id, name_unicode,
                    # This is only needed on win32, where this is the only way
                    # we know the executable bit.
                    inv_entry.executable = executable
                    # not strictly needed: working tree
                    #inv_entry.text_size = size
                    #inv_entry.text_sha1 = sha1
                elif kind == 'directory':
                    # add this entry to the parent map.
                    parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
                elif kind == 'tree-reference':
                    assert self._repo_supports_tree_reference, \
                        "repository of %r " \
                        "doesn't support tree references " \
                        "required by entry %r" \
                    inv_entry.reference_revision = link_or_sha1 or None
                elif kind != 'symlink':
                    raise AssertionError("unknown kind %r" % kind)
                # These checks cost us around 40ms on a 55k entry tree
                assert file_id not in inv_byid, ('file_id %s already in'
                    ' inventory as %s' % (file_id, inv_byid[file_id]))
                assert name_unicode not in parent_ie.children
                inv_byid[file_id] = inv_entry
                parent_ie.children[name_unicode] = inv_entry
        self._inventory = inv

    def _get_entry(self, file_id=None, path=None):
        """Get the dirstate row for file_id or path.

        If either file_id or path is supplied, it is used as the key to lookup.
        If both are supplied, the fastest lookup is used, and an error is
        raised if they do not both point at the same row.

        :param file_id: An optional unicode file_id to be looked up.
        :param path: An optional unicode path to be looked up.
        :return: The dirstate row tuple for path/file_id, or (None, None)
        """
        if file_id is None and path is None:
            raise errors.BzrError('must supply file_id or path')
        state = self.current_dirstate()
            path = path.encode('utf8')
        return state._get_entry(0, fileid_utf8=file_id, path_utf8=path)

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        # check file id is valid unconditionally.
        entry = self._get_entry(file_id=file_id, path=path)
            raise errors.NoSuchId(self, file_id)
            path = pathjoin(entry[0][0], entry[0][1]).decode('utf8')
        file_abspath = self.abspath(path)
        state = self.current_dirstate()
        if stat_value is None:
                stat_value = os.lstat(file_abspath)
                if e.errno == errno.ENOENT:
        link_or_sha1 = state.update_entry(entry, file_abspath,
            stat_value=stat_value)
        if entry[1][0][0] == 'f':

    def _get_inventory(self):
        """Get the inventory for the tree. This is only valid within a lock."""
        if self._inventory is not None:
            return self._inventory
        self._must_be_locked()
        self._generate_inventory()
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation requests the ids list from the dirstate file.
        """
        return self.current_dirstate().get_parent_ids()

    def get_reference_revision(self, file_id, path=None):
        # referenced tree's revision is whatever's currently there
        return self.get_nested_tree(file_id, path).last_revision()

    def get_nested_tree(self, file_id, path=None):
            path = self.id2path(file_id)
        # else: check file_id is at path?
        return WorkingTree.open(self.abspath(path))

    def get_root_id(self):
        """Return the id of this tree's root."""
        return self._get_entry(path='')[0][2]

    def has_id(self, file_id):
        state = self.current_dirstate()
        file_id = osutils.safe_file_id(file_id)
        row, parents = self._get_entry(file_id=file_id)
        return osutils.lexists(pathjoin(
            self.basedir, row[0].decode('utf8'), row[1].decode('utf8')))

    def id2path(self, file_id):
        "Convert a file-id to a path."
        file_id = osutils.safe_file_id(file_id)
        state = self.current_dirstate()
        entry = self._get_entry(file_id=file_id)
        if entry == (None, None):
            raise errors.NoSuchId(tree=self, file_id=file_id)
        path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
        return path_utf8.decode('utf8')

    if not osutils.supports_executable():
        def is_executable(self, file_id, path=None):
            """Test if a file is executable or not.

            Note: The caller is expected to take a read-lock before calling this.
            """
            file_id = osutils.safe_file_id(file_id)
            entry = self._get_entry(file_id=file_id, path=path)
            if entry == (None, None):
            return entry[1][0][3]

        def is_executable(self, file_id, path=None):
            """Test if a file is executable or not.

            Note: The caller is expected to take a read-lock before calling this.
            """
            file_id = osutils.safe_file_id(file_id)
            path = self.id2path(file_id)
            mode = os.lstat(self.abspath(path)).st_mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

"""Iterate through file_ids for this tree.
513
file_ids are in a WorkingTree if they are in the working inventory
514
and the working file exists.
517
for key, tree_details in self.current_dirstate()._iter_entries():
518
if tree_details[0][0] in ('a', 'r'): # absent, relocated
519
# not relevant to the working tree
521
path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
522
if osutils.lexists(path):
523
result.append(key[2])
526
def iter_references(self):
527
for key, tree_details in self.current_dirstate()._iter_entries():
528
if tree_details[0][0] in ('a', 'r'): # absent, relocated
529
# not relevant to the working tree
532
# the root is not a reference.
534
path = pathjoin(self.basedir, key[0].decode('utf8'), key[1].decode('utf8'))
536
if self._kind(path) == 'tree-reference':
538
except errors.NoSuchFile:
539
# path is missing on disk.
542
def kind(self, file_id):
543
"""Return the kind of a file.
545
This is always the actual kind that's on disk, regardless of what it
548
Note: The caller is expected to take a read-lock before calling this.
550
relpath = self.id2path(file_id)
551
assert relpath != None, \
552
"path for id {%s} is None!" % file_id
553
return self._kind(relpath)
555
def _kind(self, relpath):
556
abspath = self.abspath(relpath)
557
kind = file_kind(abspath)
558
if (self._repo_supports_tree_reference and
559
kind == 'directory' and
560
self._directory_is_tree_reference(relpath)):
561
kind = 'tree-reference'
565
    def _last_revision(self):
        """See MutableTree.last_revision."""
        parent_ids = self.current_dirstate().get_parent_ids()
            return _mod_revision.NULL_REVISION

        """See Branch.lock_read, and WorkingTree.unlock."""
        self.branch.lock_read()
        self._control_files.lock_read()
        state = self.current_dirstate()
        if not state._lock_token:
        # set our support for tree references from the repository in
        self._repo_supports_tree_reference = getattr(
            self.branch.repository._format, "supports_tree_reference",
        self._control_files.unlock()

    def _lock_self_write(self):
        """This should be called after the branch is locked."""
        self._control_files.lock_write()
        state = self.current_dirstate()
        if not state._lock_token:
        # set our support for tree references from the repository in
        self._repo_supports_tree_reference = getattr(
            self.branch.repository._format, "supports_tree_reference",
        self._control_files.unlock()

    def lock_tree_write(self):
        """See MutableTree.lock_tree_write, and WorkingTree.unlock."""
        self.branch.lock_read()
        self._lock_self_write()

    def lock_write(self):
        """See MutableTree.lock_write, and WorkingTree.unlock."""
        self.branch.lock_write()
        self._lock_self_write()

    @needs_tree_write_lock
    def move(self, from_paths, to_dir, after=False):
        """See WorkingTree.move()."""
        state = self.current_dirstate()

        assert not isinstance(from_paths, basestring)
        to_dir_utf8 = to_dir.encode('utf8')
        to_entry_dirname, to_basename = os.path.split(to_dir_utf8)
        id_index = state._get_id_index()
        # check destination directory
        # get the details for it
        to_entry_block_index, to_entry_entry_index, dir_present, entry_present = \
            state._get_block_entry_index(to_entry_dirname, to_basename, 0)
        if not entry_present:
            raise errors.BzrMoveFailedError('', to_dir,
                errors.NotVersionedError(to_dir))
        to_entry = state._dirblocks[to_entry_block_index][1][to_entry_entry_index]
        # get a handle on the block itself.
        to_block_index = state._ensure_block(
            to_entry_block_index, to_entry_entry_index, to_dir_utf8)
        to_block = state._dirblocks[to_block_index]
        to_abs = self.abspath(to_dir)
        if not isdir(to_abs):
            raise errors.BzrMoveFailedError('',to_dir,
                errors.NotADirectory(to_abs))

        if to_entry[1][0][0] != 'd':
            raise errors.BzrMoveFailedError('',to_dir,
                errors.NotADirectory(to_abs))

        if self._inventory is not None:
            update_inventory = True
            to_dir_ie = inv[to_dir_id]
            to_dir_id = to_entry[0][2]
            update_inventory = False

        def move_one(old_entry, from_path_utf8, minikind, executable,
                fingerprint, packed_stat, size,
                to_block, to_key, to_path_utf8):
            state._make_absent(old_entry)
            from_key = old_entry[0]
                lambda:state.update_minimal(from_key,
                    executable=executable,
                    fingerprint=fingerprint,
                    packed_stat=packed_stat,
                    path_utf8=from_path_utf8))
            state.update_minimal(to_key,
                executable=executable,
                fingerprint=fingerprint,
                packed_stat=packed_stat,
                path_utf8=to_path_utf8)
            added_entry_index, _ = state._find_entry_index(to_key, to_block[1])
            new_entry = to_block[1][added_entry_index]
            rollbacks.append(lambda:state._make_absent(new_entry))

        for from_rel in from_paths:
            # from_rel is 'pathinroot/foo/bar'
            from_rel_utf8 = from_rel.encode('utf8')
            from_dirname, from_tail = osutils.split(from_rel)
            from_dirname, from_tail_utf8 = osutils.split(from_rel_utf8)
            from_entry = self._get_entry(path=from_rel)
            if from_entry == (None, None):
                raise errors.BzrMoveFailedError(from_rel,to_dir,
                    errors.NotVersionedError(path=str(from_rel)))

            from_id = from_entry[0][2]
            to_rel = pathjoin(to_dir, from_tail)
            to_rel_utf8 = pathjoin(to_dir_utf8, from_tail_utf8)
            item_to_entry = self._get_entry(path=to_rel)
            if item_to_entry != (None, None):
                raise errors.BzrMoveFailedError(from_rel, to_rel,
                    "Target is already versioned.")

            if from_rel == to_rel:
                raise errors.BzrMoveFailedError(from_rel, to_rel,
                    "Source and target are identical.")

            from_missing = not self.has_filename(from_rel)
            to_missing = not self.has_filename(to_rel)
                    raise errors.BzrMoveFailedError(from_rel, to_rel,
                        errors.NoSuchFile(path=to_rel,
                            extra="New file has not been created yet"))
                # neither path exists
                raise errors.BzrRenameFailedError(from_rel, to_rel,
                    errors.PathsDoNotExist(paths=(from_rel, to_rel)))
            if from_missing: # implicitly just update our path mapping
                raise errors.RenameFailedFilesExist(from_rel, to_rel,
                    extra="(Use --after to update the Bazaar id)")

            def rollback_rename():
                """A single rename has failed, roll it back."""
                # roll back everything, even if we encounter trouble doing one
                # TODO: at least log the other exceptions rather than just
                # losing them mbp 20070307
                for rollback in reversed(rollbacks):
                        exc_info = sys.exc_info()
                    raise exc_info[0], exc_info[1], exc_info[2]

            # perform the disk move first - it's the most likely failure point.
                from_rel_abs = self.abspath(from_rel)
                to_rel_abs = self.abspath(to_rel)
                    osutils.rename(from_rel_abs, to_rel_abs)
                    raise errors.BzrMoveFailedError(from_rel, to_rel, e[1])
                rollbacks.append(lambda: osutils.rename(to_rel_abs, from_rel_abs))

            # perform the rename in the inventory next if needed: it's easy
                from_entry = inv[from_id]
                current_parent = from_entry.parent_id
                inv.rename(from_id, to_dir_id, from_tail)
                    lambda: inv.rename(from_id, current_parent, from_tail))
            # finally do the rename in the dirstate, which is a little
            # tricky to rollback, but least likely to need it.
            old_block_index, old_entry_index, dir_present, file_present = \
                state._get_block_entry_index(from_dirname, from_tail_utf8, 0)
            old_block = state._dirblocks[old_block_index][1]
            old_entry = old_block[old_entry_index]
            from_key, old_entry_details = old_entry
            cur_details = old_entry_details[0]
            to_key = ((to_block[0],) + from_key[1:3])
            minikind = cur_details[0]
            move_one(old_entry, from_path_utf8=from_rel_utf8,
                executable=cur_details[3],
                fingerprint=cur_details[1],
                packed_stat=cur_details[4],
                to_path_utf8=to_rel_utf8)

            def update_dirblock(from_dir, to_key, to_dir_utf8):
                """Recursively update all entries in this dirblock."""
                assert from_dir != '', "renaming root not supported"
                from_key = (from_dir, '')
                from_block_idx, present = \
                    state._find_block_index_from_key(from_key)
                    # This is the old record, if it isn't present, then
                    # there is theoretically nothing to update.
                    # (Unless it isn't present because of lazy loading,
                    # but we don't do that yet)
                from_block = state._dirblocks[from_block_idx]
                to_block_index, to_entry_index, _, _ = \
                    state._get_block_entry_index(to_key[0], to_key[1], 0)
                to_block_index = state._ensure_block(
                    to_block_index, to_entry_index, to_dir_utf8)
                to_block = state._dirblocks[to_block_index]

                # Grab a copy since move_one may update the list.
                for entry in from_block[1][:]:
                    assert entry[0][0] == from_dir
                    cur_details = entry[1][0]
                    to_key = (to_dir_utf8, entry[0][1], entry[0][2])
                    from_path_utf8 = osutils.pathjoin(entry[0][0], entry[0][1])
                    to_path_utf8 = osutils.pathjoin(to_dir_utf8, entry[0][1])
                    minikind = cur_details[0]
                        # Deleted children of a renamed directory
                        # Do not need to be updated.
                        # Children that have been renamed out of this
                        # directory should also not be updated
                    move_one(entry, from_path_utf8=from_path_utf8,
                        executable=cur_details[3],
                        fingerprint=cur_details[1],
                        packed_stat=cur_details[4],
                        to_path_utf8=to_path_utf8)
                        # We need to move all the children of this
                        update_dirblock(from_path_utf8, to_key,
            update_dirblock(from_rel_utf8, to_key, to_rel_utf8)
            result.append((from_rel, to_rel))
            state._dirblock_state = dirstate.DirState.IN_MEMORY_MODIFIED
            self._make_dirty(reset_inventory=False)

    def _must_be_locked(self):
        if not self._control_files._lock_count:
            raise errors.ObjectNotLocked(self)

"""Initialize the state in this tree to be a new tree."""
859
def path2id(self, path):
860
"""Return the id for path in this tree."""
861
path = path.strip('/')
862
entry = self._get_entry(path=path)
863
if entry == (None, None):
867
def paths2ids(self, paths, trees=[], require_versioned=True):
868
"""See Tree.paths2ids().
870
This specialisation fast-paths the case where all the trees are in the
875
parents = self.get_parent_ids()
877
if not (isinstance(tree, DirStateRevisionTree) and tree._revision_id in
879
return super(WorkingTree4, self).paths2ids(paths, trees, require_versioned)
880
search_indexes = [0] + [1 + parents.index(tree._revision_id) for tree in trees]
881
# -- make all paths utf8 --
884
paths_utf8.add(path.encode('utf8'))
886
# -- paths is now a utf8 path set --
887
# -- get the state object and prepare it.
888
state = self.current_dirstate()
889
if False and (state._dirblock_state == dirstate.DirState.NOT_IN_MEMORY
890
and '' not in paths):
891
paths2ids = self._paths2ids_using_bisect
893
paths2ids = self._paths2ids_in_memory
894
return paths2ids(paths, search_indexes,
895
require_versioned=require_versioned)
897
    def _paths2ids_in_memory(self, paths, search_indexes,
            require_versioned=True):
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()

        def _entries_for_path(path):
            """Return a list with all the entries that match path for all ids.
            """
            dirname, basename = os.path.split(path)
            key = (dirname, basename, '')
            block_index, present = state._find_block_index_from_key(key)
                # the block which should contain path is absent.
            block = state._dirblocks[block_index][1]
            entry_index, _ = state._find_entry_index(key, block)
            # we may need to look at multiple entries at this path: walk while the paths match.
            while (entry_index < len(block) and
                block[entry_index][0][0:2] == key[0:2]):
                result.append(block[entry_index])

        if require_versioned:
            # -- check all supplied paths are versioned in a search tree. --
                path_entries = _entries_for_path(path)
                    # this specified path is not present at all: error
                    all_versioned = False
                found_versioned = False
                # for each id at this path
                for entry in path_entries:
                    for index in search_indexes:
                        if entry[1][index][0] != 'a': # absent
                            found_versioned = True
                            # all good: found a versioned cell
                if not found_versioned:
                    # every index was 'absent' at all ids for this
                    all_versioned = False
            if not all_versioned:
                raise errors.PathsNotVersionedError(paths)
        # -- remove redundancy in supplied paths to prevent over-scanning --
            other_paths = paths.difference(set([path]))
            if not osutils.is_inside_any(other_paths, path):
                # this is a top level path, we must check it.
                search_paths.add(path)

        # for all search_indexes in each path at or under each element of
        # search_paths, if the detail is relocated: add the id, and add the
        # relocated path as one to search if it's not searched already. If the
        # detail is not relocated, add the id.
        searched_paths = set()

        def _process_entry(entry):
            """Look at search_indexes within entry.

            If a specific tree's details are relocated, add the relocation
            target to search_paths if not searched already. If it is absent, do
            nothing. Otherwise add the id to found_ids.
            """
            for index in search_indexes:
                if entry[1][index][0] == 'r': # relocated
                    if not osutils.is_inside_any(searched_paths, entry[1][index][1]):
                        search_paths.add(entry[1][index][1])
                elif entry[1][index][0] != 'a': # absent
                    found_ids.add(entry[0][2])

            current_root = search_paths.pop()
            searched_paths.add(current_root)
            # process the entries for this containing directory: the rest will be
            # found by their parents recursively.
            root_entries = _entries_for_path(current_root)
                # this specified path is not present at all, skip it.
            for entry in root_entries:
                _process_entry(entry)
            initial_key = (current_root, '', '')
            block_index, _ = state._find_block_index_from_key(initial_key)
            while (block_index < len(state._dirblocks) and
                osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                for entry in state._dirblocks[block_index][1]:
                    _process_entry(entry)

    def _paths2ids_using_bisect(self, paths, search_indexes,
            require_versioned=True):
        state = self.current_dirstate()
        split_paths = sorted(osutils.split(p) for p in paths)
        found = state._bisect_recursive(split_paths)

        if require_versioned:
            found_dir_names = set(dir_name_id[:2] for dir_name_id in found)
            for dir_name in split_paths:
                if dir_name not in found_dir_names:
                    raise errors.PathsNotVersionedError(paths)

        for dir_name_id, trees_info in found.iteritems():
            for index in search_indexes:
                if trees_info[index][0] not in ('r', 'a'):
                    found_ids.add(dir_name_id[2])

    def read_working_inventory(self):
        """Read the working inventory.

        This is a meaningless operation for dirstate, but we obey it anyhow.
        """
        return self.inventory

    def revision_tree(self, revision_id):
        """See Tree.revision_tree.

        WorkingTree4 supplies revision_trees for any basis tree.
        """
        revision_id = osutils.safe_revision_id(revision_id)
        dirstate = self.current_dirstate()
        parent_ids = dirstate.get_parent_ids()
        if revision_id not in parent_ids:
            raise errors.NoSuchRevisionInTree(self, revision_id)
        if revision_id in dirstate.get_ghosts():
            raise errors.NoSuchRevisionInTree(self, revision_id)
        return DirStateRevisionTree(dirstate, revision_id,
            self.branch.repository)

    @needs_tree_write_lock
    def set_last_revision(self, new_revision):
        """Change the last revision in the working tree."""
        new_revision = osutils.safe_revision_id(new_revision)
        parents = self.get_parent_ids()
        if new_revision in (NULL_REVISION, None):
            assert len(parents) < 2, (
                "setting the last parent to none with a pending merge is "
            self.set_parent_ids([])
            self.set_parent_ids([new_revision] + parents[1:],
                allow_leftmost_as_ghost=True)

    @needs_tree_write_lock
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        revision_ids = [osutils.safe_revision_id(r) for r in revision_ids]
        for revision_id in revision_ids:
            revtree = self.branch.repository.revision_tree(revision_id)
            # TODO: jam 20070213 KnitVersionedFile raises
            #       RevisionNotPresent rather than NoSuchRevision if a
            #       given revision_id is not present. Should Repository be
            #       catching it and re-raising NoSuchRevision?
            except (errors.NoSuchRevision, errors.RevisionNotPresent):
            trees.append((revision_id, revtree))
        self.set_parent_trees(trees,
            allow_leftmost_as_ghost=allow_leftmost_as_ghost)

    @needs_tree_write_lock
    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        """Set the parents of the working tree.

        :param parents_list: A list of (revision_id, tree) tuples.
            If tree is None, then that element is treated as an unreachable
            parent tree - i.e. a ghost.
        """
        dirstate = self.current_dirstate()
        if len(parents_list) > 0:
            if not allow_leftmost_as_ghost and parents_list[0][1] is None:
                raise errors.GhostRevisionUnusableHere(parents_list[0][0])
        # convert absent trees to the null tree, which we convert back to
        # missing on access.
        for rev_id, tree in parents_list:
            rev_id = osutils.safe_revision_id(rev_id)
            _mod_revision.check_not_reserved_id(rev_id)
            if tree is not None:
                real_trees.append((rev_id, tree))
                real_trees.append((rev_id,
                    self.branch.repository.revision_tree(None)))
                ghosts.append(rev_id)
        dirstate.set_parent_trees(real_trees, ghosts=ghosts)
        self._make_dirty(reset_inventory=False)

    def _set_root_id(self, file_id):
        """See WorkingTree.set_root_id."""
        state = self.current_dirstate()
        state.set_path_id('', file_id)
        if state._dirblock_state == dirstate.DirState.IN_MEMORY_MODIFIED:
            self._make_dirty(reset_inventory=True)

    def supports_tree_reference(self):
        return self._repo_supports_tree_reference

        """Unlock in format 4 trees needs to write the entire dirstate."""
        # do non-implementation specific cleanup
        if self._control_files._lock_count == 1:
            # eventually we should do signature checking during read locks for
            if self._control_files._lock_mode == 'w':
            if self._dirstate is not None:
                # This is a no-op if there are no modifications.
                self._dirstate.save()
                self._dirstate.unlock()
            # TODO: jam 20070301 We shouldn't have to wipe the dirstate at this
            #       point. Instead, it could check if the header has been
            #       modified when it is locked, and if not, it can hang on to
            #       the data it has in memory.
            self._dirstate = None
            self._inventory = None
        # reverse order of locking.
            return self._control_files.unlock()
            self.branch.unlock()

    @needs_tree_write_lock
    def unversion(self, file_ids):
        """Remove the file ids in file_ids from the current versioned set.

        When a file_id is unversioned, all of its children are automatically

        :param file_ids: The file ids to stop versioning.
        :raises: NoSuchId if any fileid is not currently versioned.
        """
        state = self.current_dirstate()
        state._read_dirblocks_if_needed()
        ids_to_unversion = set()
        for file_id in file_ids:
            ids_to_unversion.add(osutils.safe_file_id(file_id))
        paths_to_unversion = set()
        # check if the root is to be unversioned, if so, assert for now.
        # walk the state marking unversioned things as absent.
        # if there are any un-unversioned ids at the end, raise
        for key, details in state._dirblocks[0][1]:
            if (details[0][0] not in ('a', 'r') and # absent or relocated
                key[2] in ids_to_unversion):
                # I haven't written the code to unversion / yet - it should be
                raise errors.BzrError('Unversioning the / is not currently supported')
        while block_index < len(state._dirblocks):
            # process one directory at a time.
            block = state._dirblocks[block_index]
            # first check: is the path one to remove - it or its children
            delete_block = False
            for path in paths_to_unversion:
                if (block[0].startswith(path) and
                    (len(block[0]) == len(path) or
                    block[0][len(path)] == '/')):
                    # this entire block should be deleted - it's the block for a
                    # path to unversion; or the child of one
            # TODO: trim paths_to_unversion as we pass by paths
                # this block is to be deleted: process it.
                # TODO: we can special case the no-parents case and
                #       just forget the whole block.
                while entry_index < len(block[1]):
                    # Mark this file id as having been removed
                    entry = block[1][entry_index]
                    ids_to_unversion.discard(entry[0][2])
                    if (entry[1][0][0] == 'a'
                        or not state._make_absent(entry)):
                # go to the next block. (At the moment we don't delete empty
            while entry_index < len(block[1]):
                entry = block[1][entry_index]
                if (entry[1][0][0] in ('a', 'r') or # absent, relocated
                    # ^ some parent row.
                    entry[0][2] not in ids_to_unversion):
                    # ^ not an id to unversion
                if entry[1][0][0] == 'd':
                    paths_to_unversion.add(pathjoin(entry[0][0], entry[0][1]))
                if not state._make_absent(entry):
                # we have unversioned this id
                ids_to_unversion.remove(entry[0][2])
        if ids_to_unversion:
            raise errors.NoSuchId(self, iter(ids_to_unversion).next())
        self._make_dirty(reset_inventory=False)
        # have to change the legacy inventory too.
        if self._inventory is not None:
            for file_id in file_ids:
                self._inventory.remove_recursive_id(file_id)

    def _validate(self):
        self._dirstate._validate()

    @needs_tree_write_lock
    def _write_inventory(self, inv):
        """Write inventory as the current inventory."""
        assert not self._dirty, "attempting to write an inventory when the dirstate is dirty will cause data loss"
        self.current_dirstate().set_state_from_inventory(inv)
        self._make_dirty(reset_inventory=False)
        if self._inventory is not None:
            self._inventory = inv


class WorkingTreeFormat4(WorkingTreeFormat3):
    """The first consolidated dirstate working tree format.

        - exists within a metadir controlling .bzr
        - includes an explicit version marker for the workingtree control
          files, separate from the BzrDir format
        - modifies the hash cache format
        - is new in bzr 0.15
        - uses a LockDir to guard access to it.
    """

    upgrade_recommended = False

    def get_format_string(self):
        """See WorkingTreeFormat.get_format_string()."""
        return "Bazaar Working Tree Format 4 (bzr 0.15)\n"

    def get_format_description(self):
        """See WorkingTreeFormat.get_format_description()."""
        return "Working tree format 4"

    def initialize(self, a_bzrdir, revision_id=None):
        """See WorkingTreeFormat.initialize().

        :param revision_id: allows creating a working tree at a different
            revision than the branch is at.

        These trees get an initial random root id, if their repository supports
        rich root data, TREE_ROOT otherwise.
        """
        revision_id = osutils.safe_revision_id(revision_id)
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)
        transport = a_bzrdir.get_workingtree_transport(self)
        control_files = self._open_control_files(a_bzrdir)
        control_files.create_lock()
        control_files.lock_write()
        control_files.put_utf8('format', self.get_format_string())
        branch = a_bzrdir.open_branch()
        if revision_id is None:
            revision_id = branch.last_revision()
        local_path = transport.local_abspath('dirstate')
        # write out new dirstate (must exist when we create the tree)
        state = dirstate.DirState.initialize(local_path)
        wt = WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
            _control_files=control_files)
        wt.lock_tree_write()
            if revision_id in (None, NULL_REVISION):
                if branch.repository.supports_rich_root():
                    wt._set_root_id(generate_ids.gen_root_id())
                    wt._set_root_id(ROOT_ID)
            wt.set_last_revision(revision_id)
            basis = wt.basis_tree()
            # if the basis has a root id we have to use that; otherwise we use
            basis_root_id = basis.get_root_id()
            if basis_root_id is not None:
                wt._set_root_id(basis_root_id)
            transform.build_tree(basis, wt)
            control_files.unlock()

    def _open(self, a_bzrdir, control_files):
        """Open the tree itself.

        :param a_bzrdir: the dir for the tree.
        :param control_files: the control files for the tree.
        """
        return WorkingTree4(a_bzrdir.root_transport.local_abspath('.'),
            branch=a_bzrdir.open_branch(),
            _control_files=control_files)

    def __get_matchingbzrdir(self):
        # please test against something that will let us do tree references
        return bzrdir.format_registry.make_bzrdir(
            'dirstate-with-subtree')

    _matchingbzrdir = property(__get_matchingbzrdir)


class DirStateRevisionTree(Tree):
    """A revision tree pulling the inventory from a dirstate."""

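    # Illustration only (not part of the original source): instances of this
    # class are normally obtained from a dirstate-backed working tree rather
    # than constructed directly, e.g.
    #
    #   basis = wt.revision_tree(wt.get_parent_ids()[0])
    #
    # where wt is a WorkingTree4 (see WorkingTree4.revision_tree above).
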
    def __init__(self, dirstate, revision_id, repository):
        self._dirstate = dirstate
        self._revision_id = osutils.safe_revision_id(revision_id)
        self._repository = repository
        self._inventory = None
        self._dirstate_locked = False

        return "<%s of %s in %s>" % \
            (self.__class__.__name__, self._revision_id, self._dirstate)

    def annotate_iter(self, file_id,
            default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter"""
        w = self._get_weave(file_id)
        return w.annotate_iter(self.inventory[file_id].revision)

    def _get_ancestors(self, default_revision):
        return set(self._repository.get_ancestry(self._revision_id,

    def _comparison_data(self, entry, path):
        """See Tree._comparison_data."""
            return None, False, None
        # trust the entry as RevisionTree does, but this may not be
        # sensible: the entry might not have come from us?
        return entry.kind, entry.executable, None

    def _file_size(self, entry, stat_value):
        return entry.text_size

    def filter_unversioned_files(self, paths):
        """Filter out paths that are not versioned.

        :return: set of paths.
        """
        pred = self.has_filename
        return set((p for p in paths if not pred(p)))

    def get_root_id(self):
        return self.path2id('')

    def _get_parent_index(self):
        """Return the index in the dirstate referenced by this tree."""
        return self._dirstate.get_parent_ids().index(self._revision_id) + 1

    def _get_entry(self, file_id=None, path=None):
        """Get the dirstate row for file_id or path.

        If either file_id or path is supplied, it is used as the key to lookup.
        If both are supplied, the fastest lookup is used, and an error is
        raised if they do not both point at the same row.

        :param file_id: An optional unicode file_id to be looked up.
        :param path: An optional unicode path to be looked up.
        :return: The dirstate row tuple for path/file_id, or (None, None)
        """
        if file_id is None and path is None:
            raise errors.BzrError('must supply file_id or path')
        file_id = osutils.safe_file_id(file_id)
        if path is not None:
            path = path.encode('utf8')
        parent_index = self._get_parent_index()
        return self._dirstate._get_entry(parent_index, fileid_utf8=file_id, path_utf8=path)

    def _generate_inventory(self):
        """Create and set self.inventory from the dirstate object.

        (So this is only called the first time the inventory is requested for
        this tree; it then remains in memory until it's out of date.)

        This is relatively expensive: we have to walk the entire dirstate.
        """
        assert self._locked, 'cannot generate inventory of an unlocked '\
            'dirstate revision tree'
        # separate call for profiling - makes it clear where the costs are.
        self._dirstate._read_dirblocks_if_needed()
        assert self._revision_id in self._dirstate.get_parent_ids(), \
            'parent %s has disappeared from %s' % (
            self._revision_id, self._dirstate.get_parent_ids())
        parent_index = self._dirstate.get_parent_ids().index(self._revision_id) + 1
        # This is identical now to the WorkingTree _generate_inventory except
        # for the tree index use.
        root_key, current_entry = self._dirstate._get_entry(parent_index, path_utf8='')
        current_id = root_key[2]
        assert current_entry[parent_index][0] == 'd'
        inv = Inventory(root_id=current_id, revision_id=self._revision_id)
        inv.root.revision = current_entry[parent_index][4]
        # Turn some things into local variables
        minikind_to_kind = dirstate.DirState._minikind_to_kind
        factory = entry_factory
        utf8_decode = cache_utf8._utf8_decode
        inv_byid = inv._byid
        # we could do this straight out of the dirstate; it might be fast
        # and should be profiled - RBC 20070216
        parent_ies = {'' : inv.root}
        for block in self._dirstate._dirblocks[1:]: #skip root
            parent_ie = parent_ies[dirname]
                # all the paths in this block are not versioned in this tree
            for key, entry in block[1]:
                minikind, fingerprint, size, executable, revid = entry[parent_index]
                if minikind in ('a', 'r'): # absent, relocated
                name_unicode = utf8_decode(name)[0]
                kind = minikind_to_kind[minikind]
                inv_entry = factory[kind](file_id, name_unicode,
                inv_entry.revision = revid
                    inv_entry.executable = executable
                    inv_entry.text_size = size
                    inv_entry.text_sha1 = fingerprint
                elif kind == 'directory':
                    parent_ies[(dirname + '/' + name).strip('/')] = inv_entry
                elif kind == 'symlink':
                    inv_entry.executable = False
                    inv_entry.text_size = None
                    inv_entry.symlink_target = utf8_decode(fingerprint)[0]
                elif kind == 'tree-reference':
                    inv_entry.reference_revision = fingerprint or None
                    raise AssertionError("cannot convert entry %r into an InventoryEntry"
                # These checks cost us around 40ms on a 55k entry tree
                assert file_id not in inv_byid
                assert name_unicode not in parent_ie.children
                inv_byid[file_id] = inv_entry
                parent_ie.children[name_unicode] = inv_entry
        self._inventory = inv

    def get_file_mtime(self, file_id, path=None):
        """Return the modification time for this record.

        We return the timestamp of the last-changed revision.
        """
        # Make sure the file exists
        entry = self._get_entry(file_id, path=path)
        if entry == (None, None): # do we raise?
        parent_index = self._get_parent_index()
        last_changed_revision = entry[1][parent_index][4]
        return self._repository.get_revision(last_changed_revision).timestamp

    def get_file_sha1(self, file_id, path=None, stat_value=None):
        entry = self._get_entry(file_id=file_id, path=path)
        parent_index = self._get_parent_index()
        parent_details = entry[1][parent_index]
        if parent_details[0] == 'f':
            return parent_details[1]

    @symbol_versioning.deprecated_method(symbol_versioning.zero_ninety)
    def get_weave(self, file_id):
        return self._get_weave(file_id)

    def _get_weave(self, file_id):
        return self._repository.weave_store.get_weave(file_id,
            self._repository.get_transaction())

    def get_file(self, file_id):
        return StringIO(self.get_file_text(file_id))

    def get_file_lines(self, file_id):
        ie = self.inventory[file_id]
        return self._get_weave(file_id).get_lines(ie.revision)

    def get_file_size(self, file_id):
        return self.inventory[file_id].text_size

    def get_file_text(self, file_id):
        return ''.join(self.get_file_lines(file_id))

    def get_reference_revision(self, file_id, path=None):
        return self.inventory[file_id].reference_revision

    def iter_files_bytes(self, desired_files):
        """See Tree.iter_files_bytes.

        This version is implemented on top of Repository.iter_files_bytes"""
        parent_index = self._get_parent_index()
        repo_desired_files = []
        for file_id, identifier in desired_files:
            entry = self._get_entry(file_id)
            if entry == (None, None):
                raise errors.NoSuchId(self, file_id)
            repo_desired_files.append((file_id, entry[1][parent_index][4],
        return self._repository.iter_files_bytes(repo_desired_files)

    def get_symlink_target(self, file_id):
        entry = self._get_entry(file_id=file_id)
        parent_index = self._get_parent_index()
        if entry[1][parent_index][0] != 'l':
            # At present, none of the tree implementations supports non-ascii
            # symlink targets. So we will just assume that the dirstate path is
            return entry[1][parent_index][1]

    def get_revision_id(self):
        """Return the revision id for this tree."""
        return self._revision_id

    def _get_inventory(self):
        if self._inventory is not None:
            return self._inventory
        self._must_be_locked()
        self._generate_inventory()
        return self._inventory

    inventory = property(_get_inventory,
                         doc="Inventory of this Tree")

    def get_parent_ids(self):
        """The parents of a tree in the dirstate are not cached."""
        return self._repository.get_revision(self._revision_id).parent_ids

    def has_filename(self, filename):
        return bool(self.path2id(filename))

    def kind(self, file_id):
        return self.inventory[file_id].kind

    def is_executable(self, file_id, path=None):
        ie = self.inventory[file_id]
        if ie.kind != "file":
        return ie.executable

    def list_files(self, include_root=False):
        # We use a standard implementation, because DirStateRevisionTree is
        # dealing with one of the parents of the current state
        inv = self._get_inventory()
        entries = inv.iter_entries()
        if self.inventory.root is not None and not include_root:
        for path, entry in entries:
            yield path, 'V', entry.kind, entry.file_id, entry

    def lock_read(self):
        """Lock the tree for a set of operations."""
        if not self._locked:
            self._repository.lock_read()
            if self._dirstate._lock_token is None:
                self._dirstate.lock_read()
                self._dirstate_locked = True

    def _must_be_locked(self):
        if not self._locked:
            raise errors.ObjectNotLocked(self)

    def path2id(self, path):
        """Return the id for path in this tree."""
        # lookup by path: faster than splitting and walking the inventory.
        entry = self._get_entry(path=path)
        if entry == (None, None):

        """Unlock, freeing any cache memory used during the lock."""
        # outside of a lock, the inventory is suspect: release it.
        if not self._locked:
            self._inventory = None
            if self._dirstate_locked:
                self._dirstate.unlock()
                self._dirstate_locked = False
            self._repository.unlock()

    def walkdirs(self, prefix=""):
        # TODO: jam 20070215 This is the lazy way by using the RevisionTree
        #       implementation based on an inventory.
        #       This should be cleaned up to use the much faster Dirstate code
        # So for now, we just build up the parent inventory, and extract
        # it the same way RevisionTree does.
        _directory = 'directory'
        inv = self._get_inventory()
        top_id = inv.path2id(prefix)
            pending = [(prefix, top_id)]
            relpath, file_id = pending.pop()
            # 0 - relpath, 1- file-id
                relroot = relpath + '/'
            # FIXME: stash the node in pending
            entry = inv[file_id]
            for name, child in entry.sorted_children():
                toppath = relroot + name
                dirblock.append((toppath, name, child.kind, None,
                    child.file_id, child.kind
            yield (relpath, entry.file_id), dirblock
            # push the user specified dirs from dirblock
            for dir in reversed(dirblock):
                if dir[2] == _directory:
                    pending.append((dir[0], dir[4]))


class InterDirStateTree(InterTree):
    """Fast path optimiser for changes_from with dirstate trees.

    This is used only when both trees are in the dirstate working file, and
    the source is any parent within the dirstate, and the destination is
    the current working tree of the same dirstate.
    """
    # this could be generalized to allow comparisons between any trees in the
    # dirstate, and possibly between trees stored in different dirstates.

    def __init__(self, source, target):
        super(InterDirStateTree, self).__init__(source, target)
        if not InterDirStateTree.is_compatible(source, target):
            raise Exception, "invalid source %r and target %r" % (source, target)

    def make_source_parent_tree(source, target):
        """Change the source tree into a parent of the target."""
        revid = source.commit('record tree')
        target.branch.repository.fetch(source.branch.repository, revid)
        target.set_parent_ids([revid])
        return target.basis_tree(), target

    _matching_from_tree_format = WorkingTreeFormat4()
    _matching_to_tree_format = WorkingTreeFormat4()
    _test_mutable_trees_to_test_trees = make_source_parent_tree

    def _iter_changes(self, include_unchanged=False,
            specific_files=None, pb=None, extra_trees=[],
            require_versioned=True, want_unversioned=False):
        """Return the changes from source to target.

        :return: An iterator that yields tuples. See InterTree._iter_changes
        :param specific_files: An optional list of file paths to restrict the
            comparison to. When mapping filenames to ids, all matches in all
            trees (including optional extra_trees) are used, and all children of
            matched directories are included.
        :param include_unchanged: An optional boolean requesting the inclusion of
            unchanged entries in the result.
        :param extra_trees: An optional list of additional trees to use when
            mapping the contents of specific_files (paths) to file_ids.
        :param require_versioned: If True, all files in specific_files must be
            versioned in one of source, target, extra_trees or
            PathsNotVersionedError is raised.
        :param want_unversioned: Should unversioned files be returned in the
            output. An unversioned file is defined as one with (False, False)
            for the versioned pair.
        """
        utf8_decode = cache_utf8._utf8_decode
        _minikind_to_kind = dirstate.DirState._minikind_to_kind
        cmp_by_dirs = dirstate.cmp_by_dirs
        # NB: show_status depends on being able to pass in non-versioned files
        # and report them as unknown
        # TODO: handle extra trees in the dirstate.
        # TODO: handle comparisons as an empty tree as a different special
        # case? mbp 20070226
        if extra_trees or (self.source._revision_id == NULL_REVISION):
            # we can't fast-path these cases (yet)
            for f in super(InterDirStateTree, self)._iter_changes(
                include_unchanged, specific_files, pb, extra_trees,
                require_versioned, want_unversioned=want_unversioned):
        parent_ids = self.target.get_parent_ids()
        assert (self.source._revision_id in parent_ids), \
            "revision {%s} is not stored in {%s}, but %s " \
            "can only be used for trees stored in the dirstate" \
            % (self.source._revision_id, self.target, self._iter_changes)
        if self.source._revision_id == NULL_REVISION:
            indices = (target_index,)
            assert (self.source._revision_id in parent_ids), \
                "Failure: source._revision_id: %s not in target.parent_ids(%s)" % (
                self.source._revision_id, parent_ids)
            source_index = 1 + parent_ids.index(self.source._revision_id)
            indices = (source_index,target_index)
# -- make all specific_files utf8 --
1739
specific_files_utf8 = set()
1740
for path in specific_files:
1741
specific_files_utf8.add(path.encode('utf8'))
1742
specific_files = specific_files_utf8
1744
specific_files = set([''])
1745
# -- specific_files is now a utf8 path set --
1746
# -- get the state object and prepare it.
1747
state = self.target.current_dirstate()
1748
state._read_dirblocks_if_needed()
1749
def _entries_for_path(path):
1750
"""Return a list with all the entries that match path for all ids.
1752
dirname, basename = os.path.split(path)
1753
key = (dirname, basename, '')
1754
block_index, present = state._find_block_index_from_key(key)
1756
# the block which should contain path is absent.
1759
block = state._dirblocks[block_index][1]
1760
entry_index, _ = state._find_entry_index(key, block)
1761
# we may need to look at multiple entries at this path: walk while the specific_files match.
1762
while (entry_index < len(block) and
1763
block[entry_index][0][0:2] == key[0:2]):
1764
result.append(block[entry_index])
1767
        if require_versioned:
            # -- check all supplied paths are versioned in a search tree. --
            all_versioned = True
            for path in specific_files:
                path_entries = _entries_for_path(path)
                if not path_entries:
                    # this specified path is not present at all: error
                    all_versioned = False
                    break
                found_versioned = False
                # for each id at this path
                for entry in path_entries:
                    # for each tree.
                    for index in indices:
                        if entry[1][index][0] != 'a': # absent
                            found_versioned = True
                            # all good: found a versioned cell
                            break
                if not found_versioned:
                    # none of the indexes was not 'absent' at all ids for this
                    # path.
                    all_versioned = False
                    break
            if not all_versioned:
                raise errors.PathsNotVersionedError(specific_files)
        # -- remove redundancy in supplied specific_files to prevent over-scanning --
        search_specific_files = set()
        for path in specific_files:
            other_specific_files = specific_files.difference(set([path]))
            if not osutils.is_inside_any(other_specific_files, path):
                # this is a top level path, we must check it.
                search_specific_files.add(path)
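        # e.g. for specific_files of {'a', 'a/b', 'c'} only 'a' and 'c' are
        # kept: 'a/b' is inside 'a' and will be reached while walking 'a'.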
        # compare source_index and target_index at or under each element of search_specific_files.
        # follow the comparison table below. Note that we only want to do diff operations when
        # the target is fdl because that's when the walkdirs logic will have exposed the pathinfo
        # for the target.
        # cases:
        #
        # Source | Target | disk | action
        #   r    | fdlt   |      | add source to search, add id path move and perform
        #        |        |      | diff check on source-target
        #   r    | fdlt   |  a   | dangling file that was present in the basis.
        #        |        |      | ???
        #   r    |  a     |      | add source to search
        #   r    |  a     |  a   |
        #   r    |  r     |      | this path is present in a non-examined tree, skip.
        #   r    |  r     |  a   | this path is present in a non-examined tree, skip.
        #   a    | fdlt   |      | add new id
        #   a    | fdlt   |  a   | dangling locally added file, skip
        #   a    |  a     |      | not present in either tree, skip
        #   a    |  a     |  a   | not present in any tree, skip
        #   a    |  r     |      | not present in either tree at this path, skip as it
        #        |        |      | may not be selected by the users list of paths.
        #   a    |  r     |  a   | not present in either tree at this path, skip as it
        #        |        |      | may not be selected by the users list of paths.
        #  fdlt  | fdlt   |      | content in both: diff them
        #  fdlt  | fdlt   |  a   | deleted locally, but not unversioned - show as deleted ?
        #  fdlt  |  a     |      | unversioned: output deleted id for now
        #  fdlt  |  a     |  a   | unversioned and deleted: output deleted id
        #  fdlt  |  r     |      | relocated in this tree, so add target to search.
        #        |        |      | Don't diff, we will see an r,fd; pair when we reach
        #        |        |      | this id at the other path.
        #  fdlt  |  r     |  a   | relocated in this tree, so add target to search.
        #        |        |      | Don't diff, we will see an r,fd; pair when we reach
        #        |        |      | this id at the other path.
        # for all search indexes in each path at or under each element of
        # search_specific_files, if the detail is relocated: add the id, and add the
        # relocated path as one to search if its not searched already. If the
        # detail is not relocated, add the id.
        searched_specific_files = set()
        NULL_PARENT_DETAILS = dirstate.DirState.NULL_PARENT_DETAILS
        # Using a list so that we can access the values and change them in
        # nested scope. Each one is [path, file_id]
        last_source_parent = [None, None]
        last_target_parent = [None, None]

        use_filesystem_for_exec = (sys.platform != 'win32')
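        # On win32 the filesystem does not record an executable bit, so the
        # executable flag recorded in the dirstate is used there instead of
        # the value derived from stat() results.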
        # Just a sentry, so that _process_entry can say that this
        # record is handled, but isn't interesting to process (unchanged)
        uninteresting = object()

        old_dirname_to_file_id = {}
        new_dirname_to_file_id = {}
        # TODO: jam 20070516 - Avoid the _get_entry lookup overhead by
        #       keeping a cache of directories that we have seen.

        def _process_entry(entry, path_info):
            """Compare an entry and real disk to generate delta information.

            :param path_info: top_relpath, basename, kind, lstat, abspath for
                the path of entry. If None, then the path is considered absent.
                (Perhaps we should pass in a concrete entry for this ?)
                Basename is returned as a utf8 string because we expect this
                tuple will be ignored, and don't want to take the time to
                decode.
            :return: None if these don't match
                A tuple of information about the change, or
                the object 'uninteresting' if these match, but are
                basically identical.
            """
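            # The change tuple has the same shape as Tree._iter_changes
            # results: (file_id, (old_path, new_path), changed_content,
            # (old_versioned, new_versioned), (old_parent_id, new_parent_id),
            # (old_name, new_name), (old_kind, new_kind),
            # (old_executable, new_executable)).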
            if source_index is None:
                source_details = NULL_PARENT_DETAILS
            else:
                source_details = entry[1][source_index]
            target_details = entry[1][target_index]
            target_minikind = target_details[0]
            if path_info is not None and target_minikind in 'fdlt':
                assert target_index == 0
                link_or_sha1 = state.update_entry(entry, abspath=path_info[4],
                    stat_value=path_info[3])
                # The entry may have been modified by update_entry
                target_details = entry[1][target_index]
                target_minikind = target_details[0]
            else:
                link_or_sha1 = None
            file_id = entry[0][2]
            source_minikind = source_details[0]
            if source_minikind in 'fdltr' and target_minikind in 'fdlt':
                # claimed content in both: diff
                #   r    | fdlt   |      | add source to search, add id path move and perform
                #        |        |      | diff check on source-target
                #   r    | fdlt   |  a   | dangling file that was present in the basis.
                #        |        |      | ???
                if source_minikind in 'r':
                    # add the source to the search path to find any children it
                    # has.  TODO ? : only add if it is a container ?
                    if not osutils.is_inside_any(searched_specific_files,
                                                 source_details[1]):
                        search_specific_files.add(source_details[1])
                    # generate the old path; this is needed for stating later
                    # as well.
                    old_path = source_details[1]
                    old_dirname, old_basename = os.path.split(old_path)
                    path = pathjoin(entry[0][0], entry[0][1])
                    old_entry = state._get_entry(source_index,
                                                 path_utf8=old_path)
                    # update the source details variable to be the real
                    # location.
                    source_details = old_entry[1][source_index]
                    source_minikind = source_details[0]
                else:
                    old_dirname = entry[0][0]
                    old_basename = entry[0][1]
                    old_path = path = None
                if path_info is None:
                    # the file is missing on disk, show as removed.
                    content_change = True
                    target_kind = None
                    target_exec = False
                else:
                    # source and target are both versioned and disk file is present.
                    target_kind = path_info[2]
                    if target_kind == 'directory':
                        if path is None:
                            old_path = path = pathjoin(old_dirname, old_basename)
                        new_dirname_to_file_id[path] = file_id
                        if source_minikind != 'd':
                            content_change = True
                        else:
                            # directories have no fingerprint
                            content_change = False
                        target_exec = False
                    elif target_kind == 'file':
                        if source_minikind != 'f':
                            content_change = True
                        else:
                            # We could check the size, but we already have the
                            # sha1 hash.
                            content_change = (link_or_sha1 != source_details[1])
                        # Target details is updated at update_entry time
                        if use_filesystem_for_exec:
                            # We don't need S_ISREG here, because we are sure
                            # we are dealing with a file.
                            target_exec = bool(stat.S_IEXEC & path_info[3].st_mode)
                        else:
                            target_exec = target_details[3]
                    elif target_kind == 'symlink':
                        if source_minikind != 'l':
                            content_change = True
                        else:
                            content_change = (link_or_sha1 != source_details[1])
                        target_exec = False
                    elif target_kind == 'tree-reference':
                        if source_minikind != 't':
                            content_change = True
                        else:
                            content_change = False
                        target_exec = False
                    else:
                        raise Exception, "unknown kind %s" % path_info[2]
                if source_minikind == 'd':
                    if path is None:
                        old_path = path = pathjoin(old_dirname, old_basename)
                    old_dirname_to_file_id[old_path] = file_id
                # parent id is the entry for the path in the target tree
                if old_dirname == last_source_parent[0]:
                    source_parent_id = last_source_parent[1]
                else:
                    try:
                        source_parent_id = old_dirname_to_file_id[old_dirname]
                    except KeyError:
                        source_parent_entry = state._get_entry(source_index,
                                                               path_utf8=old_dirname)
                        source_parent_id = source_parent_entry[0][2]
                    if source_parent_id == entry[0][2]:
                        # This is the root, so the parent is None
                        source_parent_id = None
                    else:
                        last_source_parent[0] = old_dirname
                        last_source_parent[1] = source_parent_id
                new_dirname = entry[0][0]
                if new_dirname == last_target_parent[0]:
                    target_parent_id = last_target_parent[1]
                else:
                    try:
                        target_parent_id = new_dirname_to_file_id[new_dirname]
                    except KeyError:
                        # TODO: We don't always need to do the lookup, because the
                        #       parent entry will be the same as the source entry.
                        target_parent_entry = state._get_entry(target_index,
                                                               path_utf8=new_dirname)
                        assert target_parent_entry != (None, None), (
                            "Could not find target parent in wt: %s\nparent of: %s"
                            % (new_dirname, entry))
                        target_parent_id = target_parent_entry[0][2]
                    if target_parent_id == entry[0][2]:
                        # This is the root, so the parent is None
                        target_parent_id = None
                    else:
                        last_target_parent[0] = new_dirname
                        last_target_parent[1] = target_parent_id

                source_exec = source_details[3]
                if (include_unchanged
                    or content_change
                    or source_parent_id != target_parent_id
                    or old_basename != entry[0][1]
                    or source_exec != target_exec
                    ):
                    if old_path is None:
                        old_path = path = pathjoin(old_dirname, old_basename)
                        old_path_u = utf8_decode(old_path)[0]
                        path_u = old_path_u
                    else:
                        old_path_u = utf8_decode(old_path)[0]
                        if old_path == path:
                            path_u = old_path_u
                        else:
                            path_u = utf8_decode(path)[0]
                    source_kind = _minikind_to_kind[source_minikind]
                    return (entry[0][2],
                           (old_path_u, path_u),
                           content_change,
                           (True, True),
                           (source_parent_id, target_parent_id),
                           (utf8_decode(old_basename)[0], utf8_decode(entry[0][1])[0]),
                           (source_kind, target_kind),
                           (source_exec, target_exec))
                else:
                    return uninteresting
            elif source_minikind in 'a' and target_minikind in 'fdlt':
                # looks like a new file
                if path_info is not None:
                    path = pathjoin(entry[0][0], entry[0][1])
                    # parent id is the entry for the path in the target tree
                    # TODO: these are the same for an entire directory: cache em.
                    parent_id = state._get_entry(target_index,
                                                 path_utf8=entry[0][0])[0][2]
                    if parent_id == entry[0][2]:
                        parent_id = None
                    if use_filesystem_for_exec:
                        # We need S_ISREG here, because we aren't sure if this
                        # is a file or not.
                        target_exec = bool(
                            stat.S_ISREG(path_info[3].st_mode)
                            and stat.S_IEXEC & path_info[3].st_mode)
                    else:
                        target_exec = target_details[3]
                    return (entry[0][2],
                           (None, utf8_decode(path)[0]),
                           True,
                           (False, True),
                           (None, parent_id),
                           (None, utf8_decode(entry[0][1])[0]),
                           (None, path_info[2]),
                           (None, target_exec))
                else:
                    # but its not on disk: we deliberately treat this as just
                    # never-present. (Why ?! - RBC 20070224)
                    pass
            elif source_minikind in 'fdlt' and target_minikind in 'a':
                # unversioned, possibly, or possibly not deleted: we don't care.
                # if it's still on disk, *and* there's no other entry at this
                # path [we don't know this in this routine at the moment -
                # perhaps we should change this - then it would be an unknown.
                old_path = pathjoin(entry[0][0], entry[0][1])
                # parent id is the entry for the path in the target tree
                parent_id = state._get_entry(source_index, path_utf8=entry[0][0])[0][2]
                if parent_id == entry[0][2]:
                    parent_id = None
                return (entry[0][2],
                       (utf8_decode(old_path)[0], None),
                       True,
                       (True, False),
                       (parent_id, None),
                       (utf8_decode(entry[0][1])[0], None),
                       (_minikind_to_kind[source_minikind], None),
                       (source_details[3], None))
            elif source_minikind in 'fdlt' and target_minikind in 'r':
                # a rename; could be a true rename, or a rename inherited from
                # a renamed parent. TODO: handle this efficiently. It's not a
                # common case to rename dirs though, so a correct but slow
                # implementation will do.
                if not osutils.is_inside_any(searched_specific_files, target_details[1]):
                    search_specific_files.add(target_details[1])
            elif source_minikind in 'ra' and target_minikind in 'ra':
                # neither of the selected trees contain this file,
                # so skip over it. This is not currently directly tested, but
                # is indirectly via test_too_much.TestCommands.test_conflicts.
                pass
            else:
                raise AssertionError("don't know how to compare "
                    "source_minikind=%r, target_minikind=%r"
                    % (source_minikind, target_minikind))
            ## import pdb;pdb.set_trace()

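        # Walk the on-disk listing (via osutils._walkdirs_utf8) and the
        # dirstate dirblocks in parallel, one directory at a time, feeding
        # matched-up (entry, path_info) pairs to _process_entry above.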
        while search_specific_files:
            # TODO: the pending list should be lexically sorted?  the
            # interface doesn't require it.
            current_root = search_specific_files.pop()
            current_root_unicode = current_root.decode('utf8')
            searched_specific_files.add(current_root)
            # process the entries for this containing directory: the rest will be
            # found by their parents recursively.
            root_entries = _entries_for_path(current_root)
            root_abspath = self.target.abspath(current_root_unicode)
            try:
                root_stat = os.lstat(root_abspath)
            except OSError, e:
                if e.errno == errno.ENOENT:
                    # the path does not exist: let _process_entry know that.
                    root_dir_info = None
                else:
                    # some other random error: hand it up.
                    raise
            else:
                root_dir_info = ('', current_root,
                    osutils.file_kind_from_stat_mode(root_stat.st_mode), root_stat,
                    root_abspath)
                if root_dir_info[2] == 'directory':
                    if self.target._directory_is_tree_reference(
                        current_root.decode('utf8')):
                        root_dir_info = root_dir_info[:2] + \
                            ('tree-reference',) + root_dir_info[3:]

            if not root_entries and not root_dir_info:
                # this specified path is not present at all, skip it.
                continue
            path_handled = False
            for entry in root_entries:
                result = _process_entry(entry, root_dir_info)
                if result is not None:
                    path_handled = True
                    if result is not uninteresting:
                        yield result
            if want_unversioned and not path_handled and root_dir_info:
                new_executable = bool(
                    stat.S_ISREG(root_dir_info[3].st_mode)
                    and stat.S_IEXEC & root_dir_info[3].st_mode)
                yield (None,
                       (None, current_root_unicode),
                       True,
                       (False, False),
                       (None, None),
                       (None, splitpath(current_root_unicode)[-1]),
                       (None, root_dir_info[2]),
                       (None, new_executable)
                      )
            initial_key = (current_root, '', '')
            block_index, _ = state._find_block_index_from_key(initial_key)
            if block_index == 0:
                # we have processed the total root already, but because the
                # initial key matched it we should skip it here.
                block_index += 1
            if root_dir_info and root_dir_info[2] == 'tree-reference':
                current_dir_info = None
            else:
                dir_iterator = osutils._walkdirs_utf8(root_abspath, prefix=current_root)
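                # Each item from _walkdirs_utf8 is ((dir_relpath, dir_path),
                # entries), where every entry matches the path_info layout
                # used by _process_entry: (relpath, basename, kind, lstat,
                # path), utf8-encoded.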
                try:
                    current_dir_info = dir_iterator.next()
                except OSError, e:
                    # on win32, python2.4 has e.errno == ERROR_DIRECTORY, but
                    # python 2.5 has e.errno == EINVAL,
                    #            and e.winerror == ERROR_DIRECTORY
                    e_winerror = getattr(e, 'winerror', None)
                    win_errors = (ERROR_DIRECTORY, ERROR_PATH_NOT_FOUND)
                    # there may be directories in the inventory even though
                    # this path is not a file on disk: so mark it as end of
                    # iterator
                    if e.errno in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                        current_dir_info = None
                    elif (sys.platform == 'win32'
                          and (e.errno in win_errors
                               or e_winerror in win_errors)):
                        current_dir_info = None
                    else:
                        raise
                else:
                    if current_dir_info[0][0] == '':
                        # remove .bzr from iteration
                        bzr_index = bisect_left(current_dir_info[1], ('.bzr',))
                        assert current_dir_info[1][bzr_index][0] == '.bzr'
                        del current_dir_info[1][bzr_index]
            # walk until both the directory listing and the versioned metadata
            # are exhausted.
            if (block_index < len(state._dirblocks) and
                osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                current_block = state._dirblocks[block_index]
            else:
                current_block = None
            while (current_dir_info is not None or
                   current_block is not None):
                if (current_dir_info and current_block
                    and current_dir_info[0][0] != current_block[0]):
                    if cmp_by_dirs(current_dir_info[0][0], current_block[0]) < 0:
                        # filesystem data refers to paths not covered by the dirblock.
                        # this has two possibilities:
                        # A) it is versioned but empty, so there is no block for it
                        # B) it is not versioned.

                        # if (A) then we need to recurse into it to check for
                        # new unknown files or directories.
                        # if (B) then we should ignore it, because we don't
                        # recurse into unknown directories.
                        path_index = 0
                        while path_index < len(current_dir_info[1]):
                            current_path_info = current_dir_info[1][path_index]
                            if want_unversioned:
                                if current_path_info[2] == 'directory':
                                    if self.target._directory_is_tree_reference(
                                        current_path_info[0].decode('utf8')):
                                        current_path_info = current_path_info[:2] + \
                                            ('tree-reference',) + current_path_info[3:]
                                new_executable = bool(
                                    stat.S_ISREG(current_path_info[3].st_mode)
                                    and stat.S_IEXEC & current_path_info[3].st_mode)
                                yield (None,
                                       (None, utf8_decode(current_path_info[0])[0]),
                                       True,
                                       (False, False),
                                       (None, None),
                                       (None, utf8_decode(current_path_info[1])[0]),
                                       (None, current_path_info[2]),
                                       (None, new_executable))
                            # don't descend into this unversioned path if it is
                            # a dir
                            if current_path_info[2] in ('directory',
                                                        'tree-reference'):
                                del current_dir_info[1][path_index]
                                path_index -= 1
                            path_index += 1

                        # This dir info has been handled, go to the next
                        try:
                            current_dir_info = dir_iterator.next()
                        except StopIteration:
                            current_dir_info = None
                    else:
                        # We have a dirblock entry for this location, but there
                        # is no filesystem path for this. This is most likely
                        # because a directory was removed from the disk.
                        # We don't have to report the missing directory,
                        # because that should have already been handled, but we
                        # need to handle all of the files that are contained
                        # within.
                        for current_entry in current_block[1]:
                            # entry referring to file not present on disk.
                            # advance the entry only, after processing.
                            result = _process_entry(current_entry, None)
                            if result is not None:
                                if result is not uninteresting:
                                    yield result
                        block_index += 1
                        if (block_index < len(state._dirblocks) and
                            osutils.is_inside(current_root,
                                              state._dirblocks[block_index][0])):
                            current_block = state._dirblocks[block_index]
                        else:
                            current_block = None
                    continue
                entry_index = 0
                if current_block and entry_index < len(current_block[1]):
                    current_entry = current_block[1][entry_index]
                else:
                    current_entry = None
                advance_entry = True
                path_index = 0
                if current_dir_info and path_index < len(current_dir_info[1]):
                    current_path_info = current_dir_info[1][path_index]
                    if current_path_info[2] == 'directory':
                        if self.target._directory_is_tree_reference(
                            current_path_info[0].decode('utf8')):
                            current_path_info = current_path_info[:2] + \
                                ('tree-reference',) + current_path_info[3:]
                else:
                    current_path_info = None
                advance_path = True
                path_handled = False
                while (current_entry is not None or
                       current_path_info is not None):
                    if current_entry is None:
                        # the check for path_handled when the path is advanced
                        # will yield this path if needed.
                        pass
                    elif current_path_info is None:
                        # no path is fine: the per entry code will handle it.
                        result = _process_entry(current_entry, current_path_info)
                        if result is not None:
                            if result is not uninteresting:
                                yield result
                    elif (current_entry[0][1] != current_path_info[1]
                          or current_entry[1][target_index][0] in 'ar'):
                        # The current path on disk doesn't match the dirblock
                        # record. Either the dirblock is marked as absent, or
                        # the file on disk is not present at all in the
                        # dirblock. Either way, report about the dirblock
                        # entry, and let other code handle the filesystem one.

                        # Compare the basename for these files to determine
                        # who goes first.
                        if current_path_info[1] < current_entry[0][1]:
                            # extra file on disk: pass for now, but only
                            # increment the path, not the entry
                            advance_entry = False
                        else:
                            # entry referring to file not present on disk.
                            # advance the entry only, after processing.
                            result = _process_entry(current_entry, None)
                            if result is not None:
                                if result is not uninteresting:
                                    yield result
                            advance_path = False
                    else:
                        result = _process_entry(current_entry, current_path_info)
                        if result is not None:
                            path_handled = True
                            if result is not uninteresting:
                                yield result
                    if advance_entry and current_entry is not None:
                        entry_index += 1
                        if entry_index < len(current_block[1]):
                            current_entry = current_block[1][entry_index]
                        else:
                            current_entry = None
                    else:
                        advance_entry = True # reset the advance flag
                    if advance_path and current_path_info is not None:
                        if not path_handled:
                            # unversioned in all regards
                            if want_unversioned:
                                new_executable = bool(
                                    stat.S_ISREG(current_path_info[3].st_mode)
                                    and stat.S_IEXEC & current_path_info[3].st_mode)
                                yield (None,
                                       (None, utf8_decode(current_path_info[0])[0]),
                                       True,
                                       (False, False),
                                       (None, None),
                                       (None, utf8_decode(current_path_info[1])[0]),
                                       (None, current_path_info[2]),
                                       (None, new_executable))
                            # don't descend into this unversioned path if it is
                            # a dir
                            if current_path_info[2] in ('directory',):
                                del current_dir_info[1][path_index]
                                path_index -= 1
                        # don't descend the disk iterator into any tree
                        # paths.
                        if current_path_info[2] == 'tree-reference':
                            del current_dir_info[1][path_index]
                            path_index -= 1
                        path_index += 1
                        if path_index < len(current_dir_info[1]):
                            current_path_info = current_dir_info[1][path_index]
                            if current_path_info[2] == 'directory':
                                if self.target._directory_is_tree_reference(
                                    current_path_info[0].decode('utf8')):
                                    current_path_info = current_path_info[:2] + \
                                        ('tree-reference',) + current_path_info[3:]
                        else:
                            current_path_info = None
                        path_handled = False
                    else:
                        advance_path = True # reset the advance flag.
                if current_block is not None:
                    block_index += 1
                    if (block_index < len(state._dirblocks) and
                        osutils.is_inside(current_root, state._dirblocks[block_index][0])):
                        current_block = state._dirblocks[block_index]
                    else:
                        current_block = None
                if current_dir_info is not None:
                    try:
                        current_dir_info = dir_iterator.next()
                    except StopIteration:
                        current_dir_info = None

    @staticmethod
    def is_compatible(source, target):
        # the target must be a dirstate working tree
        if not isinstance(target, WorkingTree4):
            return False
        # the source must be a revtree or dirstate rev tree.
        if not isinstance(source,
            (revisiontree.RevisionTree, DirStateRevisionTree)):
            return False
        # the source revid must be in the target dirstate
        if not (source._revision_id == NULL_REVISION or
            source._revision_id in target.get_parent_ids()):
            # TODO: what about ghosts? it may well need to
            # check for them explicitly.
            return False
        return True

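# Registering the optimiser lets InterTree.get(source, target) hand back an
# InterDirStateTree whenever is_compatible() above accepts the pair, so diff
# and status against a dirstate parent take the fast path automatically.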
InterTree.register_optimiser(InterDirStateTree)


class Converter3to4(object):
    """Perform an in-place upgrade of format 3 to format 4 trees."""

    def __init__(self):
        self.target_format = WorkingTreeFormat4()

    def convert(self, tree):
        # lock the control files not the tree, so that we don't get tree
        # on-unlock behaviours, and so that no one else diddles with the
        # tree during upgrade.
        tree._control_files.lock_write()
        try:
            tree.read_working_inventory()
            self.create_dirstate_data(tree)
            self.update_format(tree)
            self.remove_xml_files(tree)
        finally:
            tree._control_files.unlock()

    def create_dirstate_data(self, tree):
        """Create the dirstate based data for tree."""
        local_path = tree.bzrdir.get_workingtree_transport(None
            ).local_abspath('dirstate')
        state = dirstate.DirState.from_tree(tree, local_path)
        state.save()
        state.unlock()

    def remove_xml_files(self, tree):
        """Remove the old format 3 data."""
        transport = tree.bzrdir.get_workingtree_transport(None)
        for path in ['basis-inventory-cache', 'inventory', 'last-revision',
            'pending-merges', 'stat-cache']:
            try:
                transport.delete(path)
            except errors.NoSuchFile:
                # some files are optional - just deal.
                pass

    def update_format(self, tree):
        """Change the format marker."""
        tree._control_files.put_utf8('format',
            self.target_format.get_format_string())
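

# Usage sketch only: upgrading a format 3 tree in place. In practice this
# converter is normally driven by bzrdir's upgrade machinery rather than
# called directly; the direct call below assumes 'tree' is a WorkingTree3.
#
#   from bzrlib.workingtree import WorkingTree
#   tree = WorkingTree.open('/path/to/branch')   # a format 3 working tree
#   Converter3to4().convert(tree)                # now a dirstate (format 4) tree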