# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree"""

from __future__ import absolute_import
from collections import defaultdict
import errno
import itertools
import operator
import os
import posixpath
import stat
import sys

from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    Index,
    SHA1Writer,
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
    FLAG_STAGEMASK,
    read_submodule_head,
    validate_path,
    write_index_dict,
    )
from dulwich.object_store import (
    tree_lookup_path,
    )
from dulwich.objects import (
    S_ISGITLINK,
    )

from .. import (
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    errors,
    globbing,
    ignores,
    lock,
    merge,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    workingtree,
    )
from ..decorators import (
    only_raises,
    )
from ..mutabletree import (
    BadReferenceTarget,
    MutableTree,
    )
from .mapping import (
    GitFileIdMap,
    mode_kind,
    )
from .tree import (
    MutableGitIndexTree,
    )

IGNORE_FILENAME = ".gitignore"


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""

    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = self.repository._git._controltransport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
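
    # Usage sketch (illustrative): a GitWorkingTree is normally obtained
    # through breezy's ordinary tree-opening machinery rather than being
    # constructed directly, e.g.:
    #
    #   from breezy.workingtree import WorkingTree
    #   wt = WorkingTree.open('/path/to/git/checkout')   # placeholder path
    #   with wt.lock_read():
    #       print(wt.get_parent_ids())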

    def supports_tree_reference(self):
        return False

    def supports_rename_tracking(self):
        return False

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self._read_index()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)

    def _lock_write_tree(self):
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            try:
                self._index_file = GitFile(
                    self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
            self._read_index()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1

    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def lock_write(self, token=None):
        self.branch.lock_write()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def is_locked(self):
        return self._lock_count >= 1

    def get_physical_lock_status(self):
        return False

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()

    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
            else:
                # Something else already triggered a write of the index
                # file by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None
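
    # Locking sketch (illustrative): locks are reference-counted, so the
    # in-memory index is only written back to .git/index when the outermost
    # lock is released and the index was marked dirty:
    #
    #   with wt.lock_tree_write():        # opens .git/index via GitFile
    #       wt.add(['foo'])               # marks the index dirty
    #       with wt.lock_read():          # nested lock: only bumps the count
    #           pass
    #   # outermost unlock: _flush() writes the index and releases the lock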

    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(
                revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes(
                'MERGE_HEAD', b'\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass

    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This api will try to retrieve the tree data
        for each element of revision_ids from the trees repository. If you have
        tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is however
        an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(
                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])
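
    # Parent-id sketch (illustrative, with made-up revision ids): the first
    # parent becomes the branch head, while any additional parents are stored
    # as git SHAs in .git/MERGE_HEAD, one per line:
    #
    #   wt.set_parent_ids([revid_a, revid_b])
    #   # -> branch head moves to revid_a, MERGE_HEAD records revid_b
    #   wt.get_parent_ids()   # -> [revid_a, revid_b]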

    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(
                    self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def check_state(self):
        """Check that the working state is/isn't valid."""

    def remove(self, files, verbose=False, to_file=None, keep_files=True,
               force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):
            files = [files]

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(
                file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory
        all_files = set()
        files_to_backup = []

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                if len(filepath) > 0:
                    all_files.add(filepath)
                    recurse_directory_to_add_files(filepath)

            files = list(all_files)

            if len(files) == 0:
                return  # nothing to do

            # Sort needed to first handle directory content before the
            # directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for (file_id, path, content_change, versioned, parent_id, name,
                     kind, executable) in self.iter_changes(
                         self.basis_tree(), include_unchanged=True,
                         require_versioned=False, want_unversioned=True,
                         specific_files=files):
                    if versioned[0] is False:
                        # The record is unknown or newly added
                        files_to_backup.append(path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(path[1]))
                    elif (content_change and (kind[1] is not None)
                          and osutils.is_inside_any(files, path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(path[1]))

            for f in files:
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None

                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
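
    # Removal sketch (illustrative): unchanged files are deleted outright when
    # keep_files=False, while changed or unversioned files are renamed to a
    # backup copy unless force=True:
    #
    #   wt.remove(['old.txt'], keep_files=False)      # deletes old.txt
    #   wt.remove(['edited.txt'], keep_files=False)   # keeps a backup copy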

    def smart_add(self, file_list, recurse=True, action=None, save=True):
        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if osutils.has_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []

        def call_action(filepath, kind):
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(
                    self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    if subpath in index:
                        # Already present
                        continue
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)

            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                if user_dir != '':
                    try:
                        transport = _mod_transport.get_transport_from_path(
                            abs_user_dir)
                        _mod_controldir.ControlDirFormat.find_format(transport)
                        subtree = True
                    except errors.NotBranchError:
                        subtree = False
                    except errors.UnsupportedFormatError:
                        subtree = False
                else:
                    subtree = False
                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if (self.is_control_filename(subp) or
                            self.mapping.is_special_file(subp)):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        (index, subpath) = self._lookup_index(
                            subp.encode('utf-8'))
                        if subpath in index:
                            # Already present
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(subp, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            return added, ignored
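
    # smart_add sketch (illustrative): returns the paths actually added and a
    # mapping of ignore pattern -> skipped paths:
    #
    #   added, ignored = wt.smart_add(['.'])
    #   # added   -> e.g. ['foo.py', 'lib/bar.py']
    #   # ignored -> e.g. {'*.pyc': ['lib/bar.pyc']}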

    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False):
        if from_dir is None:
            from_dir = u""
        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(
                    dir_relpath.decode(osutils._fs_enc)):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                if not self._has_dir(relpath):
                    dirnames.remove(name)
            for name in filenames:
                if not self.mapping.is_special_file(name):
                    yp = os.path.join(dir_relpath, name)
                    try:
                        yield yp.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set([p.decode('utf-8')
                               for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=True))
            for p in (all_paths - index_paths):
                if not self._has_dir(p.encode('utf-8')):
                    yield p

    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                        kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if (kind == 'directory' and f != '' and
                            os.path.exists(os.path.join(fullpath, '.git'))):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file
        # name.
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        try:
            shaf = SHA1Writer(f)
            write_index_dict(shaf, self.index)
            shaf.close()
        except BaseException:
            f.abort()
            raise
        self._index_dirty = False

    def has_or_had_id(self, file_id):
        if self.has_id(file_id):
            return True
        if self.had_id(file_id):
            return True
        return False

    def had_id(self, file_id):
        path = self._basis_fileid_map.lookup_path(file_id)
        try:
            head = self.repository._git.head()
        except KeyError:
            # Assume no if basis is not accessible
            return False
        try:
            root_tree = self.store[head].tree
        except KeyError:
            return False
        try:
            tree_lookup_path(self.store.__getitem__,
                             root_tree, path.encode('utf-8'))
        except KeyError:
            return False
        else:
            return True

    def get_file_mtime(self, path):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(
                ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += '/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip('/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps:
            return None
        if not ps[-1].is_exclude:
            return None
        return ps[-1].pattern
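
    # is_ignored sketch (illustrative): the return value is the matching
    # pattern (usable as a boolean) or None when the path is not ignored:
    #
    #   wt.is_ignored('build/output.o')   # -> matching pattern, e.g. b'build/'
    #   wt.is_ignored('README')           # -> None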

    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return False
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        try:
            head = self.repository._git.head()
        except KeyError:
            self._basis_fileid_map = GitFileIdMap({}, self.mapping)
        else:
            self._basis_fileid_map = self.mapping.get_fileid_map(
                self.store.__getitem__, self.store[head].tree)
        self._fileid_map = self._basis_fileid_map.copy()

    def get_file_verifier(self, path, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)
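
    # Verifier sketch (illustrative): instead of hashing the file text, the
    # cheap verifier is the git blob id already recorded in the index:
    #
    #   wt.get_file_verifier('foo.py')   # -> ("GIT", <hex blob sha>)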

    def get_file_sha1(self, path, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                if e.errno in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path):
        with self.lock_read():
            encoded_path = path.encode('utf-8')
            (index, subpath) = self._lookup_index(encoded_path)
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                    return "directory"
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        encoded_path = self.abspath(path.decode('utf-8')).encode(
            osutils._fs_enc)
        return index_entry_from_path(encoded_path)

    def is_executable(self, path):
        with self.lock_read():
            if getattr(self, "_supports_executable",
                       osutils.supports_executable)():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    return False
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if getattr(self, "_supports_executable",
                   osutils.supports_executable)():
            return self._is_executable_from_path_and_stat_from_stat(
                path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(
                path, stat_result)

    def list_files(self, include_root=False, from_dir=None, recursive=True):
        if from_dir is None or from_dir == '.':
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie.file_id, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(
                    self._iter_files_recursive(from_dir, include_dirs=True))
            else:
                encoded_from_dir = self.abspath(from_dir).encode(
                    osutils._fs_enc)
                path_iterator = sorted(
                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
                     for name in os.listdir(encoded_from_dir)
                     if not self.controldir.is_control_filename(
                         name.decode(osutils._fs_enc)) and
                     not self.mapping.is_special_file(
                         name.decode(osutils._fs_enc))])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(
                        parent, dir_ids):
                    pass
                if kind in ('directory', 'tree-reference'):
                    if path != from_dir:
                        if self._has_dir(encoded_path):
                            ie = self._get_dir_ie(path, self.path2id(path))
                            status = "V"
                            file_id = ie.file_id
                        elif self.is_ignored(path):
                            status = "I"
                            ie = fk_entries[kind]()
                            file_id = None
                        else:
                            status = "?"
                            ie = fk_entries[kind]()
                            file_id = None
                        yield (
                            posixpath.relpath(path, from_dir), status, kind,
                            file_id, ie)
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield (posixpath.relpath(path, from_dir), "V", ie.kind,
                           ie.file_id, ie)
                else:
                    ie = fk_entries[kind]()
                    yield (posixpath.relpath(path, from_dir), ("I" if
                           self.is_ignored(path) else "?"), kind, None, ie)

    def all_file_ids(self):
        with self.lock_read():
            ids = {u"": self.path2id("")}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                parent = posixpath.dirname(path).strip("/")
                for e in self._add_missing_parent_ids(parent, ids):
                    pass
                ids[path] = self.path2id(path)
            return set(ids.values())

    def all_versioned_paths(self):
        with self.lock_read():
            paths = {u""}
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def iter_child_entries(self, path):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            for item_path, value in self.index.iteritems():
                decoded_item_path = item_path.decode('utf-8')
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(path, decoded_item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(decoded_item_path, path)
                if '/' in subpath:
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(
                        posixpath.join(path, dirname), parent_id)
                else:
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)

    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(
                        item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict',
                                           'contents conflict'):
                    try:
                        self._set_conflicted(
                            conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)

    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,
           file1_kind), ... ])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
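        # Shape sketch (illustrative) of one yielded item, with placeholder
        # file ids and stat values:
        #
        #   (('lib', b'lib-file-id'),
        #    [('lib/foo.py', 'foo.py', 'file', <lstat>, b'foo-id', 'file'),
        #     ('lib/junk.tmp', 'junk.tmp', 'file', <lstat>, None, None)])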
        from bisect import bisect_left

        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT
                    or (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                 cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                                             ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                            self.controldir.is_control_filename(
                                cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                            current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[4],
                                         inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unversioned, present file
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             path_elements[0][2], path_elements[0][3],
                             None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                             'unknown', None, path_elements[0][4],
                             path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True

    def _walkdirs(self, prefix=u""):
        if prefix != u"":
            prefix += u"/"
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        if prefix == b"":
            per_dir[(u'', self.get_root_id())] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 kind, None,
                 self.path2id(path.decode("utf-8")),
                 kind))

        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)

    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(
                    old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        new_path, ie.kind,
                        reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)
        self.flush()

    def annotate_iter(self, path,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name
                    # in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks
                        # and directories have a "text" which is the empty
                        # text, and we know that won't mess up annotations. But
                        # it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations

    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None
            if self.branch.head is None
            else self.store[self.branch.head].tree)

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix if something got corrupted (like the .git/index file)
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(
                        self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(
                                entry.path.decode('utf-8')))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 0, 0, len(
                                                     obj.as_raw_string()), 0,
                                                 0, 0))
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)
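
    # reset_state sketch (illustrative): rebuild .git/index from the current
    # branch head, e.g. after the index file has been corrupted:
    #
    #   wt.reset_state()                # keep current parents
    #   wt.reset_state([revision_id])   # also reset the parent list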

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            basis_tree = self.basis_tree()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local)
            new_revision = self.branch.last_revision()
            if new_revision != old_revision:
                with basis_tree.lock_read():
                    new_basis_tree = self.branch.basis_tree()
                    merge.merge_inner(
                        self.branch,
                        new_basis_tree,
                        basis_tree,
                        this_tree=self,
                        change_reporter=change_reporter,
                        show_base=show_base)
            return count

    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")

    def extract(self, sub_path, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            self.flush()
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt

    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    @property
    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']: