# Copyright (C) 2008-2018 Jelmer Vernooij <jelmer@jelmer.uk>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""An adapter between a Git index and a Bazaar Working Tree"""
from __future__ import absolute_import
from collections import defaultdict
from dulwich.ignore import (
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
build_index_from_tree,
index_entry_from_path,
index_entry_from_stat,
from dulwich.object_store import (
from dulwich.objects import (
from dulwich.repo import (
conflicts as _mod_conflicts,
controldir as _mod_controldir,
revision as _mod_revision,
transport as _mod_transport,
from ..decorators import (
from ..mutabletree import (
from .mapping import (
IGNORE_FILENAME = ".gitignore"
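# Illustrative sketch (not part of the original module): with breezy's Git
# support loaded, a Git checkout opens through the ordinary WorkingTree API
# and comes back as a GitWorkingTree; the path used here is hypothetical.
def _example_open_git_working_tree():
    from breezy.workingtree import WorkingTree
    return WorkingTree.open('/path/to/git/checkout')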
class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
"""A Git working tree."""
def __init__(self, controldir, repo, branch):
MutableGitIndexTree.__init__(self)
basedir = controldir.root_transport.local_abspath('.')
self.basedir = osutils.realpath(basedir)
self.controldir = controldir
self.repository = repo
self.store = self.repository._git.object_store
self.mapping = self.repository.get_mapping()
self._branch = branch
self._transport = controldir.transport
self._format = GitWorkingTreeFormat()
self._index_file = None
self.views = self._make_views()
self._rules_searcher = None
self._detect_case_handling()
def supports_tree_reference(self):
def supports_rename_tracking(self):
def _read_index(self):
self.index = Index(self.control_transport.local_abspath('index'))
self._index_dirty = False
"""Lock the repository for read operations.
:return: A breezy.lock.LogicalLockResult.
if not self._lock_mode:
self._lock_mode = 'r'
self._lock_count += 1
self.branch.lock_read()
return lock.LogicalLockResult(self.unlock)
def _lock_write_tree(self):
if not self._lock_mode:
self._lock_mode = 'w'
self._index_file = GitFile(self.control_transport.local_abspath('index'), 'wb')
raise errors.LockContention('index')
elif self._lock_mode == 'r':
raise errors.ReadOnlyError(self)
def lock_tree_write(self):
self.branch.lock_read()
self._lock_write_tree()
return lock.LogicalLockResult(self.unlock)
def lock_write(self, token=None):
self.branch.lock_write()
self._lock_write_tree()
return lock.LogicalLockResult(self.unlock)
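# Illustrative sketch (not part of the original module): the lock methods
# return a breezy.lock.LogicalLockResult, so callers can use them as context
# managers, mirroring the `with self.lock_read():` pattern used later in this
# class. `wt` stands for a hypothetical GitWorkingTree instance.
def _example_locking(wt):
    with wt.lock_tree_write():
        # The branch read lock and the write lock on the Git index file are
        # held here; both are released when the block exits via unlock().
        pass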
return self._lock_count >= 1
def get_physical_lock_status(self):
def break_lock(self):
self.control_transport.delete('index.lock')
except errors.NoSuchFile:
self.branch.break_lock()
@only_raises(errors.LockNotHeld, errors.LockBroken)
if not self._lock_count:
return lock.cant_unlock_not_held(self)
self._lock_count -= 1
if self._lock_count > 0:
if self._index_file is not None:
if self._index_dirty:
self._flush(self._index_file)
self._index_file.close()
# Something else already triggered a write of the index
# file by calling .flush()
self._index_file.abort()
self._index_file = None
self._lock_mode = None
def _detect_case_handling(self):
self._transport.stat(".git/cOnFiG")
except errors.NoSuchFile:
self.case_sensitive = True
self.case_sensitive = False
def merge_modified(self):
def set_merge_modified(self, modified_hashes):
raise errors.UnsupportedOperation(self.set_merge_modified, self)
def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
self.set_parent_ids([p for p, t in parents_list])
def _set_merges_from_parent_ids(self, rhs_parent_ids):
merges = [self.branch.lookup_bzr_revision_id(revid)[0] for revid in rhs_parent_ids]
except errors.NoSuchRevision as e:
raise errors.GhostRevisionUnusableHere(e.revision)
self.control_transport.put_bytes('MERGE_HEAD', b'\n'.join(merges),
mode=self.controldir._get_file_mode())
self.control_transport.delete('MERGE_HEAD')
except errors.NoSuchFile:
def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
"""Set the parent ids to revision_ids.
See also set_parent_trees. This API will try to retrieve the tree data
for each element of revision_ids from the tree's repository. If you have
tree data already available, it is more efficient to use
set_parent_trees rather than set_parent_ids. set_parent_ids is however
an easier API to use.
:param revision_ids: The revision_ids to set as the parent ids of this
working tree. Any of these may be ghosts.
with self.lock_tree_write():
self._check_parents_for_ghosts(revision_ids,
allow_leftmost_as_ghost=allow_leftmost_as_ghost)
for revision_id in revision_ids:
_mod_revision.check_not_reserved_id(revision_id)
revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)
if len(revision_ids) > 0:
self.set_last_revision(revision_ids[0])
self.set_last_revision(_mod_revision.NULL_REVISION)
self._set_merges_from_parent_ids(revision_ids[1:])
def get_parent_ids(self):
"""See Tree.get_parent_ids.
This implementation reads the pending merges list and last_revision
value and uses that to decide what the parents list should be.
last_rev = _mod_revision.ensure_null(self._last_revision())
if _mod_revision.NULL_REVISION == last_rev:
merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
except errors.NoSuchFile:
for l in osutils.split_lines(merges_bytes):
revision_id = l.rstrip(b'\n')
parents.append(self.branch.lookup_foreign_revision_id(revision_id))
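# Illustrative sketch (not part of the original module): the first parent is
# recorded as the branch's last revision, while the remaining right-hand
# parents are written to MERGE_HEAD by _set_merges_from_parent_ids() and read
# back by get_parent_ids(). The revision ids used here are hypothetical.
def _example_parent_ids(wt, revid_main, revid_merged):
    with wt.lock_tree_write():
        wt.set_parent_ids([revid_main, revid_merged])
    return wt.get_parent_ids()  # expected: [revid_main, revid_merged]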
def check_state(self):
"""Check that the working state is/isn't valid."""
def remove(self, files, verbose=False, to_file=None, keep_files=True,
"""Remove nominated files from the working tree metadata.
:param files: File paths relative to the basedir.
:param keep_files: If true, the files will also be kept.
:param force: Delete files and directories, even if they are changed
and even if the directories are not empty.
if not isinstance(files, list):
def backup(file_to_backup):
abs_path = self.abspath(file_to_backup)
backup_name = self.controldir._available_backup_name(file_to_backup)
osutils.rename(abs_path, self.abspath(backup_name))
return "removed %s (but kept a copy: %s)" % (
file_to_backup, backup_name)
# Sort needed to first handle directory content before the directory
def recurse_directory_to_add_files(directory):
# Recurse directory and add all files
# so we can check if they have changed.
for parent_info, file_infos in self.walkdirs(directory):
for relpath, basename, kind, lstat, fileid, kind in file_infos:
# Is it versioned or ignored?
if self.is_versioned(relpath):
# Add nested content for deletion.
all_files.add(relpath)
# Files which are not versioned
# should be treated as unknown.
files_to_backup.append(relpath)
with self.lock_tree_write():
for filepath in files:
# Get file name into canonical form.
abspath = self.abspath(filepath)
filepath = self.relpath(abspath)
all_files.add(filepath)
recurse_directory_to_add_files(filepath)
files = list(all_files)
return # nothing to do
# Sort needed to first handle directory content before the directory
files.sort(reverse=True)
# Bail out if we are going to delete files we shouldn't
if not keep_files and not force:
for (file_id, path, content_change, versioned, parent_id, name,
kind, executable) in self.iter_changes(self.basis_tree(),
include_unchanged=True, require_versioned=False,
want_unversioned=True, specific_files=files):
if not versioned[0]:
# The record is unknown or newly added
files_to_backup.append(path[1])
files_to_backup.extend(osutils.parent_directories(path[1]))
elif (content_change and (kind[1] is not None) and
osutils.is_inside_any(files, path[1])):
# Versioned and changed, but not deleted, and still
# in one of the dirs to be deleted.
files_to_backup.append(path[1])
files_to_backup.extend(osutils.parent_directories(path[1]))
except errors.NoSuchFile:
abs_path = self.abspath(f)
# having removed it, it must be either ignored or unknown
if self.is_ignored(f):
kind_ch = osutils.kind_marker(kind)
to_file.write(new_status + ' ' + f + kind_ch + '\n')
message = "%s does not exist" % (f, )
if f in files_to_backup and not force:
if kind == 'directory':
osutils.rmtree(abs_path)
osutils.delete_any(abs_path)
message = "deleted %s" % (f,)
message = "removed %s" % (f,)
self._unversion_path(f)
# print only one message (if any) per file.
if message is not None:
self._versioned_dirs = None
def smart_add(self, file_list, recurse=True, action=None, save=True):
# expand any symlinks in the directory part, while leaving the filename alone
# only expanding if symlinks are supported avoids windows path bugs
if osutils.has_symlinks():
file_list = list(map(osutils.normalizepath, file_list))
conflicts_related = set()
for c in self.conflicts():
conflicts_related.update(c.associated_filenames())
def call_action(filepath, kind):
if action is not None:
parent_path = posixpath.dirname(filepath)
parent_id = self.path2id(parent_path)
parent_ie = self._get_dir_ie(parent_path, parent_id)
file_id = action(self, parent_ie, filepath, kind)
if file_id is not None:
raise workingtree.SettingFileIdUnsupported()
with self.lock_tree_write():
for filepath in osutils.canonical_relpaths(self.basedir, file_list):
filepath, can_access = osutils.normalized_filename(filepath)
raise errors.InvalidNormalization(filepath)
abspath = self.abspath(filepath)
kind = osutils.file_kind(abspath)
if kind in ("file", "symlink"):
(index, subpath) = self._lookup_index(filepath.encode('utf-8'))
call_action(filepath, kind)
self._index_add_entry(filepath, kind)
added.append(filepath)
elif kind == "directory":
(index, subpath) = self._lookup_index(filepath.encode('utf-8'))
if subpath not in index:
call_action(filepath, kind)
user_dirs.append(filepath)
raise errors.BadFileKindError(filename=abspath, kind=kind)
for user_dir in user_dirs:
abs_user_dir = self.abspath(user_dir)
transport = _mod_transport.get_transport_from_path(abs_user_dir)
_mod_controldir.ControlDirFormat.find_format(transport)
except errors.NotBranchError:
except errors.UnsupportedFormatError:
trace.warning('skipping nested tree %r', abs_user_dir)
for name in os.listdir(abs_user_dir):
subp = os.path.join(user_dir, name)
if self.is_control_filename(subp) or self.mapping.is_special_file(subp):
ignore_glob = self.is_ignored(subp)
if ignore_glob is not None:
ignored.setdefault(ignore_glob, []).append(subp)
abspath = self.abspath(subp)
kind = osutils.file_kind(abspath)
if kind == "directory":
user_dirs.append(subp)
if subp in self.index:
if subp in conflicts_related:
call_action(subp, kind)
self._index_add_entry(subp, kind)
return added, ignored
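# Illustrative sketch (not part of the original module): smart_add() walks the
# given paths, versions files and directories that are neither ignored nor
# special, and reports what it skipped. The path below is hypothetical.
def _example_smart_add(wt):
    added, ignored = wt.smart_add([wt.abspath('src')])
    # `added` lists the newly versioned paths; `ignored` maps each matching
    # ignore pattern to the paths it excluded.
    return added, ignored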
def has_filename(self, filename):
return osutils.lexists(self.abspath(filename))
def _iter_files_recursive(self, from_dir=None, include_dirs=False):
for (dirpath, dirnames, filenames) in os.walk(self.abspath(from_dir).encode(osutils._fs_enc)):
dir_relpath = dirpath[len(self.basedir):].strip(b"/")
if self.controldir.is_control_filename(dir_relpath.decode(osutils._fs_enc)):
for name in list(dirnames):
if self.controldir.is_control_filename(name.decode(osutils._fs_enc)):
dirnames.remove(name)
relpath = os.path.join(dir_relpath, name)
yield relpath.decode(osutils._fs_enc)
except UnicodeDecodeError:
raise errors.BadFilenameEncoding(
relpath, osutils._fs_enc)
if not self._has_dir(relpath):
dirnames.remove(name)
for name in filenames:
if not self.mapping.is_special_file(name):
yp = os.path.join(dir_relpath, name)
yield yp.decode(osutils._fs_enc)
except UnicodeDecodeError:
raise errors.BadFilenameEncoding(
"""Yield all unversioned files in this WorkingTree.
with self.lock_read():
index_paths = set([p.decode('utf-8') for p, i in self._recurse_index_entries()])
all_paths = set(self._iter_files_recursive(include_dirs=True))
for p in (all_paths - index_paths):
if not self._has_dir(p.encode('utf-8')):
def _gather_kinds(self, files, kinds):
"""See MutableTree._gather_kinds."""
with self.lock_tree_write():
for pos, f in enumerate(files):
if kinds[pos] is None:
fullpath = osutils.normpath(self.abspath(f))
kind = osutils.file_kind(fullpath)
if e.errno == errno.ENOENT:
raise errors.NoSuchFile(fullpath)
if kind == 'directory' and f != '' and os.path.exists(os.path.join(fullpath, '.git')):
kind = 'tree-reference'
if self._lock_mode != 'w':
raise errors.NotWriteLocked(self)
# TODO(jelmer): This shouldn't be writing in-place, but index.lock is
# already in use and GitFile doesn't allow overriding the lock file name :(
f = open(self.control_transport.local_abspath('index'), 'wb')
# Note that _flush will close the file
write_index_dict(shaf, self.index)
self._index_dirty = False
def has_or_had_id(self, file_id):
if self.has_id(file_id):
if self.had_id(file_id):
def had_id(self, file_id):
path = self._basis_fileid_map.lookup_path(file_id)
head = self.repository._git.head()
# Assume no if basis is not accessible
root_tree = self.store[head].tree
tree_lookup_path(self.store.__getitem__, root_tree, path.encode('utf-8'))
def get_file_mtime(self, path, file_id=None):
"""See Tree.get_file_mtime."""
return self._lstat(path).st_mtime
if e.errno == errno.ENOENT:
raise errors.NoSuchFile(path)
def is_ignored(self, filename):
r"""Check whether the filename matches an ignore pattern.
If the file is ignored, returns the pattern which caused it to
be ignored, otherwise None. So this can simply be used as a
boolean if desired."""
if getattr(self, '_global_ignoreglobster', None) is None:
ignore_globs.update(ignores.get_runtime_ignores())
ignore_globs.update(ignores.get_user_ignores())
self._global_ignoreglobster = globbing.ExceptionGlobster(ignore_globs)
match = self._global_ignoreglobster.match(filename)
if match is not None:
if self.kind(filename) == 'directory':
except errors.NoSuchFile:
filename = filename.lstrip('/')
ignore_manager = self._get_ignore_manager()
ps = list(ignore_manager.find_matching(filename))
if not ps[-1].is_exclude:
def _get_ignore_manager(self):
ignoremanager = getattr(self, '_ignoremanager', None)
if ignoremanager is not None:
ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
self._ignoremanager = ignore_manager
return ignore_manager
def _flush_ignore_list_cache(self):
self._ignoremanager = None
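# Illustrative sketch (not part of the original module): is_ignored() returns
# the pattern that matched (or None), so it can be used both as a boolean test
# and to report why a path was skipped. The path below is hypothetical.
def _example_is_ignored(wt):
    pattern = wt.is_ignored('build/output.o')
    if pattern is not None:
        print('ignored by %s' % pattern)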
def set_last_revision(self, revid):
if _mod_revision.is_null(revid):
self.branch.set_last_revision_info(0, revid)
_mod_revision.check_not_reserved_id(revid)
self.branch.generate_revision_history(revid)
except errors.NoSuchRevision:
raise errors.GhostRevisionUnusableHere(revid)
def _reset_data(self):
head = self.repository._git.head()
self._basis_fileid_map = GitFileIdMap({}, self.mapping)
self._basis_fileid_map = self.mapping.get_fileid_map(
self.store.__getitem__, self.store[head].tree)
self._fileid_map = self._basis_fileid_map.copy()
def get_file_verifier(self, path, file_id=None, stat_value=None):
with self.lock_read():
(index, subpath) = self._lookup_index(path.encode('utf-8'))
return ("GIT", index[subpath].sha)
if self._has_dir(path):
raise errors.NoSuchFile(path)
def get_file_sha1(self, path, file_id=None, stat_value=None):
with self.lock_read():
if not self.is_versioned(path):
raise errors.NoSuchFile(path)
abspath = self.abspath(path)
return osutils.sha_file_by_name(abspath)
if e.errno in (errno.EISDIR, errno.ENOENT):
def revision_tree(self, revid):
return self.repository.revision_tree(revid)
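# Illustrative sketch (not part of the original module): for a versioned file
# the cheap verifier is the Git blob id taken from the index, while
# get_file_sha1() hashes the file as it currently exists in the working tree.
# The path below is hypothetical.
def _example_file_verifier(wt):
    kind, value = wt.get_file_verifier('README')  # ('GIT', <blob sha>)
    sha1 = wt.get_file_sha1('README')
    return kind, value, sha1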
def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
mode = stat_result.st_mode
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
return self.basis_tree().is_executable(path)
def stored_kind(self, path, file_id=None):
with self.lock_read():
encoded_path = path.encode('utf-8')
(index, subpath) = self._lookup_index(encoded_path)
return mode_kind(index[subpath].mode)
# Maybe it's a directory?
if self._has_dir(encoded_path):
raise errors.NoSuchFile(path)
def _lstat(self, path):
return os.lstat(self.abspath(path))
def _live_entry(self, path):
return index_entry_from_path(self.abspath(path.decode('utf-8')).encode(osutils._fs_enc))
def is_executable(self, path, file_id=None):
with self.lock_read():
if getattr(self, "_supports_executable", osutils.supports_executable)():
mode = self._lstat(path).st_mode
(index, subpath) = self._lookup_index(path.encode('utf-8'))
mode = index[subpath].mode
return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)
def _is_executable_from_path_and_stat(self, path, stat_result):
if getattr(self, "_supports_executable", osutils.supports_executable)():
return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
return self._is_executable_from_path_and_stat_from_basis(path, stat_result)
def list_files(self, include_root=False, from_dir=None, recursive=True):
fk_entries = {'directory': tree.TreeDirectory,
'file': tree.TreeFile,
'symlink': tree.TreeLink,
'tree-reference': tree.TreeReference}
with self.lock_read():
root_ie = self._get_dir_ie(u"", None)
if include_root and not from_dir:
yield "", "V", root_ie.kind, root_ie.file_id, root_ie
dir_ids[u""] = root_ie.file_id
path_iterator = sorted(self._iter_files_recursive(from_dir, include_dirs=True))
path_iterator = sorted([os.path.join(from_dir, name.decode(osutils._fs_enc)) for name in
os.listdir(self.abspath(from_dir).encode(osutils._fs_enc))
if not self.controldir.is_control_filename(name.decode(osutils._fs_enc))
and not self.mapping.is_special_file(name.decode(osutils._fs_enc))])
for path in path_iterator:
encoded_path = path.encode("utf-8")
except UnicodeEncodeError:
raise errors.BadFilenameEncoding(
path, osutils._fs_enc)
(index, index_path) = self._lookup_index(encoded_path)
value = index[index_path]
kind = self.kind(path)
parent, name = posixpath.split(path)
for dir_path, dir_ie in self._add_missing_parent_ids(parent, dir_ids):
if kind in ('directory', 'tree-reference'):
if self._has_dir(encoded_path):
ie = self._get_dir_ie(path, self.path2id(path))
elif self.is_ignored(path):
ie = fk_entries[kind]()
ie = fk_entries[kind]()
yield posixpath.relpath(path, from_dir), status, kind, file_id, ie
if value is not None:
ie = self._get_file_ie(name, path, value, dir_ids[parent])
yield posixpath.relpath(path, from_dir), "V", ie.kind, ie.file_id, ie
ie = fk_entries[kind]()
yield posixpath.relpath(path, from_dir), ("I" if self.is_ignored(path) else "?"), kind, None, ie
def all_file_ids(self):
with self.lock_read():
ids = {u"": self.path2id("")}
for path in self.index:
if self.mapping.is_special_file(path):
path = path.decode("utf-8")
parent = posixpath.dirname(path).strip("/")
for e in self._add_missing_parent_ids(parent, ids):
ids[path] = self.path2id(path)
return set(ids.values())
def all_versioned_paths(self):
with self.lock_read():
for path in self.index:
if self.mapping.is_special_file(path):
path = path.decode("utf-8")
path = posixpath.dirname(path).strip("/")
def iter_child_entries(self, path, file_id=None):
encoded_path = path.encode('utf-8')
with self.lock_read():
parent_id = self.path2id(path)
seen_children = set()
for item_path, value in self.index.iteritems():
decoded_item_path = item_path.decode('utf-8')
if self.mapping.is_special_file(item_path):
if not osutils.is_inside(path, decoded_item_path):
subpath = posixpath.relpath(decoded_item_path, path)
dirname = subpath.split('/', 1)[0]
file_ie = self._get_dir_ie(posixpath.join(path, dirname), parent_id)
(unused_parent, name) = posixpath.split(decoded_item_path)
file_ie = self._get_file_ie(
name, decoded_item_path, value, parent_id)
if not found_any and path != u'':
raise errors.NoSuchFile(path)
with self.lock_read():
conflicts = _mod_conflicts.ConflictList()
for item_path, value in self.index.iteritems():
if value.flags & FLAG_STAGEMASK:
conflicts.append(_mod_conflicts.TextConflict(item_path.decode('utf-8')))
def set_conflicts(self, conflicts):
for conflict in conflicts:
if conflict.typestring in ('text conflict', 'contents conflict'):
by_path.add(conflict.path.encode('utf-8'))
raise errors.UnsupportedOperation(self.set_conflicts, self)
with self.lock_tree_write():
for path in self.index:
self._set_conflicted(path, path in by_path)
def _set_conflicted(self, path, conflicted):
trace.mutter('change conflict: %r -> %r', path, conflicted)
value = self.index[path]
self._index_dirty = True
self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
self.index[path] = (value[:9] + (value[9] & ~FLAG_STAGEMASK, ))
def add_conflicts(self, new_conflicts):
with self.lock_tree_write():
for conflict in new_conflicts:
if conflict.typestring in ('text conflict', 'contents conflict'):
self._set_conflicted(conflict.path.encode('utf-8'), True)
raise errors.UnsupportedOperation(self.add_conflicts, self)
raise errors.UnsupportedOperation(self.add_conflicts, self)
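# Illustrative sketch (not part of the original module): only text and
# contents conflicts can be represented, by setting the stage bits on the
# corresponding index entry; other conflict types raise UnsupportedOperation.
# The path below is hypothetical.
def _example_add_text_conflict(wt):
    from breezy import conflicts as _conflicts
    with wt.lock_tree_write():
        wt.add_conflicts([_conflicts.TextConflict('file.txt')])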
def walkdirs(self, prefix=""):
"""Walk the directories of this tree.
Returns a generator which yields items in the form:
((current_directory_path, fileid),
[(file1_path, file1_name, file1_kind, (lstat), file1_id,
This API returns a generator, which is only valid during the current
tree transaction - within a single lock_read or lock_write duration.
If the tree is not locked, it may cause an error to be raised,
depending on the tree implementation.
from bisect import bisect_left
disk_top = self.abspath(prefix)
if disk_top.endswith('/'):
disk_top = disk_top[:-1]
top_strip_len = len(disk_top) + 1
inventory_iterator = self._walkdirs(prefix)
disk_iterator = osutils.walkdirs(disk_top, prefix)
current_disk = next(disk_iterator)
disk_finished = False
if not (e.errno == errno.ENOENT or
(sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
current_inv = next(inventory_iterator)
except StopIteration:
while not inv_finished or not disk_finished:
((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
cur_disk_dir_content) = current_disk
((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
cur_disk_dir_content) = ((None, None), None)
if not disk_finished:
# strip out .bzr dirs
if (cur_disk_dir_path_from_top[top_strip_len:] == '' and
len(cur_disk_dir_content) > 0):
# osutils.walkdirs can be made nicer -
# yield the path-from-prefix rather than the pathjoined
bzrdir_loc = bisect_left(cur_disk_dir_content,
if (bzrdir_loc < len(cur_disk_dir_content)
and self.controldir.is_control_filename(
cur_disk_dir_content[bzrdir_loc][0])):
# we don't yield the contents of .bzr, or .bzr itself.
del cur_disk_dir_content[bzrdir_loc]
# everything is unknown
# everything is missing
direction = ((current_inv[0][0] > cur_disk_dir_relpath) -
(current_inv[0][0] < cur_disk_dir_relpath))
# disk is before inventory - unknown
dirblock = [(relpath, basename, kind, stat, None, None) for
relpath, basename, kind, stat, top_path in
cur_disk_dir_content]
yield (cur_disk_dir_relpath, None), dirblock
current_disk = next(disk_iterator)
except StopIteration:
# inventory is before disk - missing.
dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
for relpath, basename, dkind, stat, fileid, kind in
yield (current_inv[0][0], current_inv[0][1]), dirblock
current_inv = next(inventory_iterator)
except StopIteration:
# versioned present directory
# merge the inventory and disk data together
for relpath, subiterator in itertools.groupby(sorted(
current_inv[1] + cur_disk_dir_content,
key=operator.itemgetter(0)), operator.itemgetter(1)):
path_elements = list(subiterator)
if len(path_elements) == 2:
inv_row, disk_row = path_elements
# versioned, present file
dirblock.append((inv_row[0],
inv_row[1], disk_row[2],
disk_row[3], inv_row[4],
elif len(path_elements[0]) == 5:
dirblock.append((path_elements[0][0],
path_elements[0][1], path_elements[0][2],
path_elements[0][3], None, None))
elif len(path_elements[0]) == 6:
# versioned, absent file.
dirblock.append((path_elements[0][0],
path_elements[0][1], 'unknown', None,
path_elements[0][4], path_elements[0][5]))
raise NotImplementedError('unreachable code')
yield current_inv[0], dirblock
current_inv = next(inventory_iterator)
except StopIteration:
current_disk = next(disk_iterator)
except StopIteration:
disk_finished = True
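# Illustrative sketch (not part of the original module): consuming walkdirs()
# under a read lock; each item pairs a (directory_path, file_id) key with a
# list of per-entry tuples in the order described in the docstring above.
def _example_walkdirs(wt):
    with wt.lock_read():
        for (dirpath, dir_file_id), entries in wt.walkdirs(''):
            for relpath, basename, kind, lstat, file_id, versioned_kind in entries:
                print(relpath, kind)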
def _walkdirs(self, prefix=u""):
prefix = prefix.encode('utf-8')
per_dir = defaultdict(set)
per_dir[(u'', self.get_root_id())] = set()
def add_entry(path, kind):
if path == b'' or not path.startswith(prefix):
(dirname, child_name) = posixpath.split(path)
add_entry(dirname, 'directory')
dirname = dirname.decode("utf-8")
dir_file_id = self.path2id(dirname)
if not isinstance(value, tuple) or len(value) != 10:
raise ValueError(value)
per_dir[(dirname, dir_file_id)].add(
(path.decode("utf-8"), child_name.decode("utf-8"),
self.path2id(path.decode("utf-8")),
with self.lock_read():
for path, value in self.index.iteritems():
if self.mapping.is_special_file(path):
if not path.startswith(prefix):
add_entry(path, mode_kind(value.mode))
return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))
def get_shelf_manager(self):
raise workingtree.ShelvingUnsupported()
def store_uncommitted(self):
raise errors.StoringUncommittedNotSupported(self)
def apply_inventory_delta(self, changes):
for (old_path, new_path, file_id, ie) in changes:
if old_path is not None:
(index, old_subpath) = self._lookup_index(old_path.encode('utf-8'))
self._index_del_entry(index, old_subpath)
self._versioned_dirs = None
if new_path is not None and ie.kind != 'directory':
if ie.kind == 'tree-reference':
self._index_add_entry(
reference_revision=ie.reference_revision)
self._index_add_entry(new_path, ie.kind)
def annotate_iter(self, path, file_id=None,
default_revision=_mod_revision.CURRENT_REVISION):
"""See Tree.annotate_iter
This implementation will use the basis tree implementation if possible.
Lines not in the basis are attributed to CURRENT_REVISION
If there are pending merges, lines added by those merges will be
incorrectly attributed to CURRENT_REVISION (but after committing, the
attribution will be correct).
with self.lock_read():
maybe_file_parent_keys = []
for parent_id in self.get_parent_ids():
parent_tree = self.revision_tree(parent_id)
except errors.NoSuchRevisionInTree:
parent_tree = self.branch.repository.revision_tree(
with parent_tree.lock_read():
# TODO(jelmer): Use rename/copy tracker to find path name in parent
kind = parent_tree.kind(parent_path)
except errors.NoSuchFile:
# Note: this is slightly unnecessary, because symlinks and
# directories have a "text" which is the empty text, and we
# know that won't mess up annotations. But it seems cleaner
parent_tree.get_file_revision(parent_path))
if parent_text_key not in maybe_file_parent_keys:
maybe_file_parent_keys.append(parent_text_key)
# Now we have the parents of this content
from breezy.annotate import Annotator
from .annotate import AnnotateProvider
annotate_provider = AnnotateProvider(
self.branch.repository._file_change_scanner)
annotator = Annotator(annotate_provider)
from breezy.graph import Graph
graph = Graph(annotate_provider)
heads = graph.heads(maybe_file_parent_keys)
file_parent_keys = []
for key in maybe_file_parent_keys:
file_parent_keys.append(key)
text = self.get_file_text(path)
this_key = (path, default_revision)
annotator.add_special_text(this_key, file_parent_keys, text)
annotations = [(key[-1], line)
for key, line in annotator.annotate_flat(this_key)]
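# Illustrative sketch (not part of the original module): annotate_iter()
# yields (revision_id, line) pairs, with uncommitted lines attributed to
# default_revision as described in the docstring above. The path is
# hypothetical.
def _example_annotate(wt):
    with wt.lock_read():
        for revid, line in wt.annotate_iter('README'):
            print(revid, line)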
def _rename_one(self, from_rel, to_rel):
os.rename(self.abspath(from_rel), self.abspath(to_rel))
def _build_checkout_with_index(self):
build_index_from_tree(
self.user_transport.local_abspath('.'),
self.control_transport.local_abspath("index"),
None if self.branch.head is None else self.store[self.branch.head].tree)
def reset_state(self, revision_ids=None):
"""Reset the state of the working tree.
This does a hard-reset to a last-known-good state. This is a way to
recover if something got corrupted (like the .git/index file)
with self.lock_tree_write():
if revision_ids is not None:
self.set_parent_ids(revision_ids)
self._index_dirty = True
if self.branch.head is not None:
for entry in self.store.iter_tree_contents(self.store[self.branch.head].tree):
if not validate_path(entry.path):
if S_ISGITLINK(entry.mode):
pass # TODO(jelmer): record and return submodule paths
# Let's at least try to use the working tree file:
st = self._lstat(self.abspath(entry.path.decode('utf-8')))
# But if it doesn't exist, we'll make something up.
obj = self.store[entry.sha]
st = os.stat_result((entry.mode, 0, 0, 0,
0, 0, len(obj.as_raw_string()), 0,
(index, subpath) = self._lookup_index(entry.path)
index[subpath] = index_entry_from_stat(st, entry.sha, 0)
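# Illustrative sketch (not part of the original module): reset_state()
# rebuilds the Git index from the current branch head, which is a way to
# recover from a corrupted .git/index file.
def _example_reset_state(wt):
    wt.reset_state()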
def pull(self, source, overwrite=False, stop_revision=None,
change_reporter=None, possible_transports=None, local=False,
with self.lock_write(), source.lock_read():
old_revision = self.branch.last_revision()
basis_tree = self.basis_tree()
count = self.branch.pull(source, overwrite, stop_revision,
possible_transports=possible_transports,
new_revision = self.branch.last_revision()
if new_revision != old_revision:
with basis_tree.lock_read():
new_basis_tree = self.branch.basis_tree()
change_reporter=change_reporter,
show_base=show_base)
def add_reference(self, sub_tree):
"""Add a TreeReference to the tree, pointing at sub_tree.
:param sub_tree: subtree to add.
with self.lock_tree_write():
sub_tree_path = self.relpath(sub_tree.basedir)
except errors.PathNotChild:
raise BadReferenceTarget(
self, sub_tree, 'Target not inside tree.')
self._add([sub_tree_path], [None], ['tree-reference'])
def _read_submodule_head(self, path):
return read_submodule_head(self.abspath(path))
def get_reference_revision(self, path, file_id=None):
hexsha = self._read_submodule_head(path)
return _mod_revision.NULL_REVISION
return self.branch.lookup_foreign_revision_id(hexsha)
def get_nested_tree(self, path, file_id=None):
return workingtree.WorkingTree.open(self.abspath(path))
def _directory_is_tree_reference(self, relpath):
# as a special case, if a directory contains control files then
# it's a tree reference, except that the root of the tree is not
return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")
def extract(self, sub_path, file_id=None, format=None):
"""Extract a subtree from this tree.
A new branch will be created, relative to the path for this tree.
segments = osutils.splitpath(path)
transport = self.branch.controldir.root_transport
for name in segments:
transport = transport.clone(name)
transport.ensure_base()
with self.lock_tree_write():
branch_transport = mkdirs(sub_path)
format = self.controldir.cloning_metadir()
branch_transport.ensure_base()
branch_bzrdir = format.initialize_on_transport(branch_transport)
repo = branch_bzrdir.find_repository()
except errors.NoRepositoryPresent:
repo = branch_bzrdir.create_repository()
if not repo.supports_rich_root():
raise errors.RootNotRich()
new_branch = branch_bzrdir.create_branch()
new_branch.pull(self.branch)
for parent_id in self.get_parent_ids():
new_branch.fetch(self.branch, parent_id)
tree_transport = self.controldir.root_transport.clone(sub_path)
if tree_transport.base != branch_transport.base:
tree_bzrdir = format.initialize_on_transport(tree_transport)
tree_bzrdir.set_branch_reference(new_branch)
tree_bzrdir = branch_bzrdir
wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
wt.set_parent_ids(self.get_parent_ids())
def _get_check_refs(self):
"""Return the references needed to perform a check of this tree.
The default implementation returns no refs, and is only suitable for
trees that have no local caching and can commit on ghosts at any time.
:seealso: breezy.check for details about check_refs.
def copy_content_into(self, tree, revision_id=None):
"""Copy the current content and user files of this tree into tree."""
with self.lock_read():
if revision_id is None:
merge.transform_tree(tree, self)
# TODO now merge from tree.last_revision to revision (to
# preserve user local changes)
other_tree = self.revision_tree(revision_id)
except errors.NoSuchRevision:
other_tree = self.branch.repository.revision_tree(
merge.transform_tree(tree, other_tree)
if revision_id == _mod_revision.NULL_REVISION:
new_parents = [revision_id]
tree.set_parent_ids(new_parents)
class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):
_tree_class = GitWorkingTree
supports_versioned_directories = False
supports_setting_file_ids = False
supports_store_uncommitted = False
supports_leftmost_parent_id_as_ghost = False
supports_righthand_parent_id_as_ghost = False
requires_normalized_unicode_filenames = True
supports_merge_modified = False
def _matchingcontroldir(self):
from .dir import LocalGitControlDirFormat
return LocalGitControlDirFormat()
def get_format_description(self):
return "Git Working Tree"
def initialize(self, a_controldir, revision_id=None, from_branch=None,
accelerator_tree=None, hardlink=False):
"""See WorkingTreeFormat.initialize()."""
if not isinstance(a_controldir, LocalGitDir):
raise errors.IncompatibleFormat(self, a_controldir)
branch = a_controldir.open_branch(nascent_ok=True)
if revision_id is not None:
branch.set_last_revision(revision_id)
wt = GitWorkingTree(
a_controldir, a_controldir.open_repository(), branch)
for hook in MutableTree.hooks['post_build_tree']: