# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# At the moment they may alias the inventory and have old copies of it in
# memory.  (Now done? -- mbp 20060309)
import collections
import errno
import fnmatch
import os
import re
import stat
from binascii import hexlify
from copy import deepcopy
from cStringIO import StringIO
from time import time

from bzrlib import bzrdir, errors, ignores, osutils, urlutils
from bzrlib.atomicfile import AtomicFile
from bzrlib.branch import (Branch,
from bzrlib.conflicts import Conflict, ConflictList, CONFLICT_SUFFIXES
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.errors import (BzrCheckError,
                           ConflictFormatError,
                           WeaveRevisionNotPresent,
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import NULL_REVISION
from bzrlib.rio import RioReader, rio_file, Stanza
from bzrlib.symbol_versioning import (deprecated_passed,
from bzrlib.textui import show_status
from bzrlib.trace import mutter, note
from bzrlib.transform import build_tree
from bzrlib.transport import get_transport
from bzrlib.transport.local import LocalTransport
import bzrlib.xml5


# the regex removes any weird characters; we don't escape them
# but rather just pull them out
_gen_file_id_re = re.compile(r'[^\w.]')
_gen_id_suffix = None
_gen_id_serial = 0


def _next_id_suffix():
    """Create a new file id suffix that is reasonably unique.

    On the first call we combine the current time with 64 bits of randomness
    to give a number that is very probably globally unique. Then each call in
    the same process adds 1 to a serial number we append to that unique value.
    """
    # XXX TODO: change bzrlib.add.smart_add to call workingtree.add() rather
    # than having to move the id randomness out of the inner loop like this.
    # XXX TODO: for the global randomness this uses we should add the thread-id
    # before the serial #.
    global _gen_id_suffix, _gen_id_serial
    if _gen_id_suffix is None:
        _gen_id_suffix = "-%s-%s-" % (compact_date(time()), rand_chars(16))
    _gen_id_serial += 1
    return _gen_id_suffix + str(_gen_id_serial)

def gen_file_id(name):
    """Return new file id for the basename 'name'.

    The uniqueness is supplied from _next_id_suffix.
    """
    # The real randomness is in the _next_id_suffix, the
    # rest of the identifier is just to be nice.
    # 1) Remove non-ascii word characters to keep the ids portable
    # 2) squash to lowercase, so the file id doesn't have to
    #    be escaped (case insensitive filesystems would bork for ids
    #    that only differed in case without escaping).
    # 3) truncate the filename to 20 chars. Long filenames also bork on some
    #    filesystems.
    # 4) Remove starting '.' characters to prevent the file ids from
    #    being considered hidden.
    ascii_word_only = _gen_file_id_re.sub('', name.lower())
    short_no_dots = ascii_word_only.lstrip('.')[:20]
    return short_no_dots + _next_id_suffix()
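
# Illustrative sketch (editorial addition, not original bzrlib code): with the
# helpers above, gen_file_id('Foo Bar.TXT') strips the space, lowercases the
# rest and appends the per-process suffix, giving an id of the shape
# 'foobar.txt-<date>-<random>-<serial>'.  The concrete date and random
# characters are, of course, assumptions.
#
#   fid = gen_file_id('Foo Bar.TXT')
#   fid.startswith('foobar.txt')       # -> True
#   gen_file_id('Foo Bar.TXT') != fid  # -> True, the serial number advances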


def gen_root_id():
        self.bzrdir = _bzrdir
        if not _internal:
            # not created via open etc.
            warnings.warn("WorkingTree() is deprecated as of bzr version 0.8. "
                 "Please use bzrdir.open_workingtree or WorkingTree.open().",
                 DeprecationWarning)
            wt = WorkingTree.open(basedir)
            self._branch = wt.branch
            self.basedir = wt.basedir
            self._control_files = wt._control_files
            self._hashcache = wt._hashcache
mutter("opening working tree %r", basedir)
239
262
if deprecated_passed(branch):
240
263
if not _internal:
241
warn("WorkingTree(..., branch=XXX) is deprecated as of bzr 0.8."
264
warnings.warn("WorkingTree(..., branch=XXX) is deprecated as of bzr 0.8."
242
265
" Please use bzrdir.open_workingtree() or"
243
266
" WorkingTree.open().",
244
267
DeprecationWarning,
270
self._branch = branch
249
self.branch = self.bzrdir.open_branch()
250
assert isinstance(self.branch, Branch), \
251
"branch %r is not a Branch" % self.branch
272
self._branch = self.bzrdir.open_branch()
252
273
        self.basedir = realpath(basedir)
        # if branch is at our basedir and is a format 6 or less
        if isinstance(self._format, WorkingTreeFormat2):
            # share control object
            self._control_files = self.branch.control_files
        else:
            # assume all other formats have their own control files.
            assert isinstance(_control_files, LockableFiles), \
                "_control_files must be a LockableFiles, not %r" \
                % _control_files
        # if needed, or, when the cache sees a change, append it to the hash
        # cache file, and have the parser take the most recent entry for a
        # given path only.
        cache_filename = self.bzrdir.get_workingtree_transport(None).local_abspath('stat-cache')
        hc = self._hashcache = HashCache(basedir, cache_filename, self._control_files._file_mode)

        # is this scan needed ? it makes things kinda slow.
        if hc.needs_write:
            mutter("write hc")
            hc.write()

        self._set_inventory(_inventory)
    branch = property(
        fget=lambda self: self._branch,
        doc="""The branch this WorkingTree is connected to.

            This cannot be set - it is reflective of the actual disk structure
            the working tree has been constructed from.
            """)

    def break_lock(self):
        """Break a lock if one is present from another instance.

        Uses the ui factory to ask for confirmation if the lock may be from
        an active process.

        This will probe the repository for its lock as well.
        """
        self._control_files.break_lock()
        self.branch.break_lock()
    def _set_inventory(self, inv):
        assert inv.root is not None
        self._inventory = inv
        self.path2id = self._inventory.path2id
    def is_control_filename(self, filename):
        """True if filename is the name of a control file in this tree.

        :param filename: A filename within the tree. This is a relative path
        from the root of this tree.

        This is true IF and ONLY IF the filename is part of the meta data
        that bzr controls in this tree. I.E. a random .bzr directory placed
        on disk will not be a control file for this tree.
        """
        return self.bzrdir.is_control_filename(filename)
    def open(path=None, _unsupported=False):

        run into /. If there isn't one, raises NotBranchError.
        TODO: give this a new exception.
        If there is one, it is returned, along with the unused portion of path.

        :return: The WorkingTree that contains 'path', and the rest of path
        """
        path = osutils.getcwd()
        control, relpath = bzrdir.BzrDir.open_containing(path)
        return control.open_workingtree(), relpath
                xml = self.read_basis_inventory()
                inv = bzrlib.xml5.serializer_v5.read_inventory_from_string(xml)
                inv.root.revision = revision_id
            except NoSuchFile:
                inv = None
            if inv is not None and inv.revision_id == revision_id:
                return bzrlib.tree.RevisionTree(self.branch.repository, inv,
                                                revision_id)
            # FIXME? RBC 20060403 should we cache the inventory here ?
        try:
            return self.branch.repository.revision_tree(revision_id)
        except errors.RevisionNotPresent:
            # the basis tree *may* be a ghost or a low level error may have
            # occurred. If the revision is present, it's a problem; if it's not,
            # it's a ghost.
            if self.branch.repository.has_revision(revision_id):
                raise
            # the basis tree is a ghost
            return self.branch.repository.revision_tree(None)

    @deprecated_method(zero_eight)

        return bzrdir.BzrDir.create_standalone_workingtree(directory)
    def relpath(self, path):
        """Return the local path portion from a given path.

        The path may be absolute or relative. If it's a relative path it is
        interpreted relative to the python current working directory.
        """
        return relpath(self.basedir, path)
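
    # Illustrative sketch (editorial addition, assumed paths): for a tree whose
    # basedir is '/home/user/project', tree.relpath('/home/user/project/src/a.py')
    # returns 'src/a.py', while a path outside the tree raises PathNotChild.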
    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def get_file(self, file_id):
        return self.get_file_byname(self.id2path(file_id))

    def get_file_text(self, file_id):
        return self.get_file(file_id).read()

    def get_file_byname(self, filename):
        return file(self.abspath(filename), 'rb')
    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = self.last_revision()
        if last_rev is None:
            parents = []
        else:
            parents = [last_rev]
        other_parents = self.pending_merges()
        return parents + other_parents
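
    # Illustrative sketch (editorial addition, assumed revision ids): for a
    # tree whose last commit is 'rev-A' with one pending merge 'rev-B',
    # get_parent_ids() returns ['rev-A', 'rev-B'] -- the leftmost entry is the
    # basis revision, followed by the pending merges in order.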
    def get_root_id(self):
        """Return the id of this tree's root"""
        inv = self.read_working_inventory()
        return inv.root.file_id
        # but with branch a kwarg now, passing in args as is results in the
        # message being used for the branch
        args = (DEPRECATED_PARAMETER, message, ) + args
        committed_id = Commit().commit(working_tree=self, revprops=revprops,
                                       *args, **kwargs)
        self._set_inventory(self.read_working_inventory())
        return committed_id

    def id2abspath(self, file_id):
        return self.abspath(self.id2path(file_id))
        return os.path.getsize(self.id2abspath(file_id))

    def get_file_sha1(self, file_id, path=None):
        if not path:
            path = self._inventory.id2path(file_id)
        return self._hashcache.get_sha1(path)

    def get_file_mtime(self, file_id, path=None):
        if not path:
            path = self._inventory.id2path(file_id)
        return os.lstat(self.abspath(path)).st_mtime

    if not supports_executable():
        def is_executable(self, file_id, path=None):
            return self._inventory[file_id].executable
    else:
        def is_executable(self, file_id, path=None):
            if not path:
                path = self._inventory.id2path(file_id)
            mode = os.lstat(self.abspath(path)).st_mode
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
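
    # Editorial note (not original code): the `if not supports_executable()`
    # test above runs at class-definition time, so only one is_executable
    # variant is ever bound on the class -- platforms without a usable
    # executable bit fall back to the flag recorded in the inventory instead
    # of stat()ing the file.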

    @needs_write_lock
    def add(self, files, ids=None):
                raise BzrError("cannot add top-level %r" % f)
            fullpath = normpath(self.abspath(f))
            try:
                kind = file_kind(fullpath)
            except OSError, e:
                if e.errno == errno.ENOENT:
                    raise NoSuchFile(fullpath)
            if not InventoryEntry.versionable_kind(kind):
                raise errors.BadFileKindError(filename=f, kind=kind)
            if file_id is None:
                inv.add_path(f, kind=kind)
            else:
                inv.add_path(f, kind=kind, file_id=file_id)

        self._write_inventory(inv)
    @needs_write_lock
    def add_parent_tree_id(self, revision_id, allow_leftmost_as_ghost=False):
        """Add revision_id as a parent.

        This is equivalent to retrieving the current list of parent ids
        and setting the list to its value plus revision_id.

        :param revision_id: The revision id to add to the parent list. It may
            be a ghost revision.
        """
        self.set_parent_ids(self.get_parent_ids() + [revision_id],
            allow_leftmost_as_ghost=allow_leftmost_as_ghost)

    def add_parent_tree(self, parent_tuple, allow_leftmost_as_ghost=False):
        """Add revision_id, tree tuple as a parent.

        This is equivalent to retrieving the current list of parent trees
        and setting the list to its value plus parent_tuple. See also
        add_parent_tree_id - if you only have a parent id available it will be
        simpler to use that api. If you have the parent already available, using
        this api is preferred.

        :param parent_tuple: The (revision id, tree) to add to the parent list.
            If the revision_id is a ghost, pass None for the tree.
        """
        self.set_parent_ids(self.get_parent_ids() + [parent_tuple[0]],
            allow_leftmost_as_ghost=allow_leftmost_as_ghost)
    def add_pending_merge(self, *revision_ids):
        # TODO: Perhaps should check at this point that the
        # history of the revision is actually present?
        existing_parents = self.get_parent_ids()
        for rev_id in revision_ids:
            if rev_id in existing_parents:
                continue
    @needs_write_lock
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        trees = []
        for rev_id in revision_ids:
            try:
                trees.append(
                    (rev_id, self.branch.repository.revision_tree(rev_id)))
            except errors.RevisionNotPresent:
                trees.append((rev_id, None))
        self.set_parent_trees(trees,
            allow_leftmost_as_ghost=allow_leftmost_as_ghost)
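
    # Illustrative usage sketch (editorial addition, assumed revision ids):
    #
    #   tree.set_parent_ids(['rev-A'])        # single basis parent
    #   tree.add_parent_tree_id('rev-B')      # record a merge of rev-B
    #   tree.get_parent_ids()                 # -> ['rev-A', 'rev-B']
    #
    # set_parent_ids() is the convenience form; set_parent_trees() skips the
    # repository lookup when the parent trees are already in hand.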
    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        """Set the parents of the working tree.

        :param parents_list: A list of (revision_id, tree) tuples.
            If tree is None, then that element is treated as an unreachable
            parent tree - i.e. a ghost.
        """
        parent = parents_list[:1]
        if parent:
            if (not allow_leftmost_as_ghost and not
                self.branch.repository.has_revision(parent[0][0])):
                raise errors.GhostRevision(parent[0][0])
            self.set_last_revision(parent[0][0])
        else:
            self.set_last_revision(None)
        merges = parents_list[1:]
        self.set_pending_merges([revid for revid, tree in merges])
    def set_pending_merges(self, rev_list):
        if self.last_revision() is None:
            new_last_list = rev_list[:1]
            rev_list = rev_list[1:]
            self.set_last_revision(new_last_list[0])
        self._control_files.put_utf8('pending-merges', '\n'.join(rev_list))
    @needs_write_lock

        Skips the control directory.
        """
        inv = self._inventory
        # Convert these into local objects to save lookup times
        pathjoin = osutils.pathjoin
        file_kind = osutils.file_kind

        # transport.base ends in a slash, we want the piece
        # between the last two slashes
        transport_base_dir = self.bzrdir.transport.base.rsplit('/', 2)[1]

        fk_entries = {'directory': TreeDirectory, 'file': TreeFile, 'symlink': TreeLink}

        # directory file_id, relative path, absolute path, reverse sorted children
        children = os.listdir(self.basedir)
        # jam 20060527 The kernel sized tree seems equivalent whether we
        # use a deque and popleft to keep them sorted, or if we use a plain
        # list and just reverse() them.
        children = collections.deque(children)
        stack = [(inv.root.file_id, u'', self.basedir, children)]
        while stack:
            from_dir_id, from_dir_relpath, from_dir_abspath, children = stack[-1]

            while children:
                f = children.popleft()
                ## TODO: If we find a subdirectory with its own .bzr
                ## directory, then that is a separate tree and we
                ## should exclude it.

                # the bzrdir for this tree
                if transport_base_dir == f:
                    continue

                # we know that from_dir_relpath and from_dir_abspath never end in a slash
                # and 'f' doesn't begin with one, so we can do a string op, rather
                # than the checks of pathjoin(); all relative paths will have an extra slash
                # at the beginning
                fp = from_dir_relpath + '/' + f
                fap = from_dir_abspath + '/' + f
                f_ie = inv.get_child(from_dir_id, f)
                if f_ie:
                    c = 'V'
                elif self.is_ignored(fp[1:]):
                    c = 'I'
                else:
                    # we may not have found this file, because of a unicode issue
                    f_norm, can_access = osutils.normalized_filename(f)
                    if f == f_norm or not can_access:
                        # No change, so treat this file normally
                        c = '?'
                    else:
                        # this file can be accessed by a normalized path
                        # check again if it is versioned
                        # these lines are repeated here for performance
                        f = f_norm
                        fp = from_dir_relpath + '/' + f
                        fap = from_dir_abspath + '/' + f
                        f_ie = inv.get_child(from_dir_id, f)
                        if f_ie:
                            c = 'V'
                        elif self.is_ignored(fp[1:]):
                            c = 'I'
                fk = file_kind(fap)

                # make a last minute entry
                if f_ie:
                    yield fp[1:], c, fk, f_ie.file_id, f_ie
                else:
                    try:
                        yield fp[1:], c, fk, None, fk_entries[fk]()
                    except KeyError:
                        yield fp[1:], c, fk, None, TreeEntry()
                    continue

                if fk != 'directory':
                    continue

                # But do this child first
                new_children = os.listdir(fap)
                new_children = collections.deque(new_children)
                stack.append((f_ie.file_id, fp, fap, new_children))
                # Break out of inner loop, so that we start outer loop with child
                break
            else:
                # if we finished all children, pop it off the stack
                stack.pop()

    @needs_write_lock
    def move(self, from_paths, to_name):
        These are files in the working directory that are not versioned or
        control files or ignored.
        """
        for subp in self.extras():
            if not self.is_ignored(subp):
                yield subp
            for subf in os.listdir(dirabs):
                if subf not in dir_entry.children:
                    subf_norm, can_access = osutils.normalized_filename(subf)
                    if subf_norm != subf and can_access:
                        if subf_norm not in dir_entry.children:
                            fl.append(subf_norm)
                    else:
                        fl.append(subf)

            for subf in fl:
                subp = pathjoin(path, subf)
                yield subp
    def _translate_ignore_rule(self, rule):
        """Translate a single ignore rule to a regex.

        There are two types of ignore rules. Those that do not contain a / are
        matched against the tail of the filename (that is, they do not care
        what directory the file is in.) Rules which do contain a slash must
        match the entire path. As a special case, './' at the start of the
        string counts as a slash in the string but is removed before matching
        (e.g. ./foo.c, ./src/foo.c)

        :return: The translated regex.
        """
        if rule[:2] in ('./', '.\\'):
            result = fnmatch.translate(rule[2:])
        elif '/' in rule or '\\' in rule:
            result = fnmatch.translate(rule)
        else:
            # default rule style.
            result = "(?:.*/)?(?!.*/)" + fnmatch.translate(rule)
        assert result[-1] == '$', "fnmatch.translate did not add the expected $"
        return "(" + result + ")"
    def _combine_ignore_rules(self, rules):
        """Combine a list of ignore rules into a single regex object.

        Each individual rule is combined with | to form a big regex, which then
        has $ added to it to form something like ()|()|()$. The group index for
        each subregex's outermost group is placed in a dictionary mapping back
        to the rule. This allows quick identification of the matching rule that
        triggered the match.
        :return: a list of the compiled regex and the matching-group index
        dictionaries. We return a list because python complains if you try to
        combine more than 100 regexes.
        """
        result = []
        groups = {}
        next_group = 0
        translated_rules = []
        for rule in rules:
            translated_rule = self._translate_ignore_rule(rule)
            compiled_rule = re.compile(translated_rule)
            groups[next_group] = rule
            next_group += compiled_rule.groups
            translated_rules.append(translated_rule)
            if next_group == 99:
                result.append((re.compile("|".join(translated_rules)), groups))
                groups = {}
                next_group = 0
                translated_rules = []
        if len(translated_rules):
            result.append((re.compile("|".join(translated_rules)), groups))
        return result
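
    # Editorial note (not original code): the chunking above exists because,
    # as the docstring says, Python complains when more than roughly 100
    # patterns are combined into one regex, so a large ignore list yields
    # several (regex, group-map) pairs and callers always iterate the list.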
    def ignored_files(self):
        """Yield list of PATH, IGNORE_PATTERN"""
    def get_ignore_list(self):
        """Return list of ignore patterns.

        Cached in the Tree object after the first call.
        """
        ignoreset = getattr(self, '_ignoreset', None)
        if ignoreset is not None:
            return ignoreset

        ignore_globs = set(bzrlib.DEFAULT_IGNORE)
        ignore_globs.update(ignores.get_runtime_ignores())
        ignore_globs.update(ignores.get_user_ignores())

        if self.has_filename(bzrlib.IGNORE_FILENAME):
            f = self.get_file_byname(bzrlib.IGNORE_FILENAME)
            ignore_globs.update(ignores.parse_ignore_file(f))

        self._ignoreset = ignore_globs
        self._ignore_regex = self._combine_ignore_rules(ignore_globs)
        return ignore_globs
    def _get_ignore_rules_as_regex(self):
        """Return a regex of the ignore rules and a mapping dict.

        :return: (ignore rules compiled regex, dictionary mapping rule group
        indices to original rule.)
        """
        if getattr(self, '_ignoreset', None) is None:
            self.get_ignore_list()
        return self._ignore_regex
    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.
        """
        # treat dotfiles correctly and allows * to match /.
        # Eventually it should be replaced with something more
        # accurate.

        rules = self._get_ignore_rules_as_regex()
        for regex, mapping in rules:
            match = regex.match(filename)
            if match is not None:
                # one or more of the groups in mapping will have a non-None
                # group match.
                groups = match.groups()
                rules = [mapping[group] for group in
                    mapping if groups[group] is not None]
                return rules[0]
        return None
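
    # Illustrative sketch (editorial addition, assumed patterns): with an
    # ignore list containing '*.pyc', a call like tree.is_ignored('foo/bar.pyc')
    # maps the regex group that matched back to the originating pattern, so the
    # caller learns *which* rule fired rather than just getting a boolean.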
    def kind(self, file_id):
        return file_kind(self.id2abspath(file_id))
        if new_revision is None:
            self.branch.set_revision_history([])
        try:
            self.branch.generate_revision_history(new_revision)
        except errors.NoSuchRevision:
            # not present in the repo - don't try to set it deeper than the tip
            self.branch.set_revision_history([new_revision])
    def _cache_basis_inventory(self, new_revision):
        """Cache new_revision as the basis inventory."""
        # TODO: this should allow the ready-to-use inventory to be passed in,
        # as commit already has that ready-to-use [while the format is the
        # same, that is].
        try:
            # this double handles the inventory - unpack and repack -
            # but is easier to understand. We can/should put a conditional
            # in here based on whether the inventory is in the latest format
            # - perhaps we should repack all inventories on a repository
            # upgrade ?
            # the fast path is to copy the raw xml from the repository. If the
            # xml contains 'revision_id="', then we assume the right
            # revision_id is set. We must check for this full string, because a
            # root node id can legitimately look like 'revision_id' but cannot
            # contain it.
            xml = self.branch.repository.get_inventory_xml(new_revision)
            if not 'revision_id="' in xml.split('\n', 1)[0]:
                inv = self.branch.repository.deserialise_inventory(
                    new_revision, xml)
                inv.revision_id = new_revision
                xml = bzrlib.xml5.serializer_v5.write_inventory_to_string(inv)
            assert isinstance(xml, str), 'serialised xml must be bytestring.'
            path = self._basis_inventory_name()
            sio = StringIO(xml)
            self._control_files.put(path, sio)
        except (errors.NoSuchRevision, errors.RevisionNotPresent):
            pass
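
    # Editorial note (not original code): the fast path above matters because a
    # large tree's basis inventory can be a sizeable XML document; copying the
    # serialised form straight from the repository skips one parse/re-serialise
    # cycle, and the slow path is only taken when the cached XML's first line
    # lacks an explicit revision_id attribute.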
    def read_basis_inventory(self):
        """Read the cached basis inventory."""
        path = self._basis_inventory_name()
        return self._control_files.get(path).read()
    @needs_read_lock
    def read_working_inventory(self):
                # TODO: Perhaps make this just a warning, and continue?
                # This tends to happen when
                raise NotVersionedError(path=f)
            # having removed it, it must be either ignored or unknown
            if self.is_ignored(f):
                new_status = 'I'
            else:
                new_status = '?'
            show_status(new_status, inv[fid].kind, f, to_file=to_file)

        self._write_inventory(inv)
        between multiple working trees, i.e. via shared storage, then we
        would probably want to lock both the local tree, and the branch.
        """
        # FIXME: We want to write out the hashcache only when the last lock on
        # this working copy is released. Peeking at the lock count is a bit
        # of a nasty hack; probably it's better to have a transaction object,
        # which can do some finalization when it's either successfully or
        # unsuccessfully completed. (Denys's original patch did that.)
        # RBC 20060206 hooking into transaction will couple lock and transaction
        # wrongly. Hooking into unlock on the control files object is fine though.
        # TODO: split this per format so there is no ugly if block
        raise NotImplementedError(self.unlock)

    @needs_write_lock
    def update(self):
            try:
                if file_kind(self.abspath(conflicted)) != "file":
                    text = False
            except errors.NoSuchFile:
                text = False
            if text is True:
                for suffix in ('.THIS', '.OTHER'):
                    try:
                        kind = file_kind(self.abspath(conflicted+suffix))
                    except errors.NoSuchFile:
                        text = False
            ctype = {True: 'text conflict', False: 'contents conflict'}[text]
            conflicts.append(Conflict.factory(ctype, path=conflicted,
                             file_id=self.path2id(conflicted)))
        return conflicts


class WorkingTree2(WorkingTree):
    """This is the Format 2 working tree.

    This was the first weave based working tree.
     - uses os locks for locking.
     - uses the branch last-revision.
    """

    def unlock(self):
        # we share control files:
        if self._hashcache.needs_write and self._control_files._lock_count==3:
            self._hashcache.write()
        # reverse order of locking.
        try:
            return self._control_files.unlock()
        finally:
            self.branch.unlock()


class WorkingTree3(WorkingTree):
    """This is the Format 3 working tree.

        """See WorkingTreeFormat.get_format_description()."""
        return "Working tree format 2"
    def stub_initialize_remote(self, control_files):
        """As a special workaround create critical control files for a remote working tree

        This ensures that it can later be updated and dealt with locally,
        since BzrDirFormat6 and BzrDirFormat5 cannot represent dirs with
        no working tree. (See bug #43064).
        """
        sio = StringIO()
        inv = Inventory()
        bzrlib.xml5.serializer_v5.write_inventory(inv, sio)
        sio.seek(0)
        control_files.put('inventory', sio)
        control_files.put_utf8('pending-merges', '')
    def initialize(self, a_bzrdir, revision_id=None):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)
        transport = a_bzrdir.get_workingtree_transport(self)
        control_files = self._open_control_files(a_bzrdir)
        control_files.create_lock()
        control_files.lock_write()
        control_files.put_utf8('format', self.get_format_string())
        branch = a_bzrdir.open_branch()
        if revision_id is None:
            revision_id = branch.last_revision()
        inv = Inventory()
        wt = WorkingTree3(a_bzrdir.root_transport.local_abspath('.'),
                          _internal=True,
                          _bzrdir=a_bzrdir,
                          _control_files=control_files)
        wt._write_inventory(inv)
        wt.set_root_id(inv.root.file_id)
        wt.set_last_revision(revision_id)
        wt.set_pending_merges([])
        build_tree(wt.basis_tree(), wt)
        control_files.unlock()
    def __init__(self):

        raise NotImplementedError
        if not isinstance(a_bzrdir.transport, LocalTransport):
            raise errors.NotLocalUrl(a_bzrdir.transport.base)
        return self._open(a_bzrdir, self._open_control_files(a_bzrdir))

    def _open(self, a_bzrdir, control_files):
        """Open the tree itself.

        :param a_bzrdir: the dir for the tree.
        :param control_files: the control files for the tree.
        """
        return WorkingTree3(a_bzrdir.root_transport.local_abspath('.'),
                            _internal=True,
                            _bzrdir=a_bzrdir,
        self._transport_readonly_server = transport_readonly_server
        self._formats = formats

    def _clone_test(self, test, bzrdir_format, workingtree_format, variation):
        """Clone test for adaptation."""
        new_test = deepcopy(test)
        new_test.transport_server = self._transport_server
        new_test.transport_readonly_server = self._transport_readonly_server
        new_test.bzrdir_format = bzrdir_format
        new_test.workingtree_format = workingtree_format
        def make_new_test_id():
            new_id = "%s(%s)" % (test.id(), variation)
            return lambda: new_id
        new_test.id = make_new_test_id()
        return new_test

    def adapt(self, test):
        from bzrlib.tests import TestSuite
        result = TestSuite()
        for workingtree_format, bzrdir_format in self._formats:
            new_test = self._clone_test(
                test,
                bzrdir_format,
                workingtree_format, workingtree_format.__class__.__name__)
            result.addTest(new_test)
        return result
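
# Editorial note (not original code): the adapter above is how a single test
# case gets run against every registered working tree format -- each clone
# carries its own bzrdir_format/workingtree_format pair and a variation-suffixed
# id (for example 'test_foo(WorkingTreeFormat3)', the exact text being an
# assumption here).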