# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""An adapter between a Git index and a Bazaar Working Tree."""

from __future__ import absolute_import

import errno
import itertools
import operator
import os
import posixpath
import stat
import sys

from collections import defaultdict

from dulwich.ignore import (
    IgnoreFilterManager,
    )
from dulwich.config import ConfigFile as GitConfigFile
from dulwich.file import GitFile, FileLocked
from dulwich.index import (
    Index,
    SHA1Writer,
    FLAG_STAGEMASK,
    build_index_from_tree,
    index_entry_from_path,
    index_entry_from_stat,
    read_submodule_head,
    validate_path,
    write_index_dict,
    )
from dulwich.object_store import (
from dulwich.objects import (
    S_ISGITLINK,
    )

from .. import (
    branch as _mod_branch,
    conflicts as _mod_conflicts,
    controldir as _mod_controldir,
    errors,
    globbing,
    ignores,
    lock,
    merge,
    osutils,
    revision as _mod_revision,
    trace,
    transport as _mod_transport,
    tree,
    urlutils,
    workingtree,
    )
from ..decorators import (
    only_raises,
    )
from ..mutabletree import (
    BadReferenceTarget,
    MutableTree,
    )
from .mapping import (
    mode_kind,
    )


class GitWorkingTree(MutableGitIndexTree, workingtree.WorkingTree):
    """A Git working tree."""
    def __init__(self, controldir, repo, branch):
        MutableGitIndexTree.__init__(self)
        basedir = controldir.root_transport.local_abspath('.')
        self.basedir = osutils.realpath(basedir)
        self.controldir = controldir
        self.repository = repo
        self.store = self.repository._git.object_store
        self.mapping = self.repository.get_mapping()
        self._branch = branch
        self._transport = self.repository._git._controltransport
        self._format = GitWorkingTreeFormat()
        self._index_file = None
        self.views = self._make_views()
        self._rules_searcher = None
        self._detect_case_handling()
    def supports_tree_reference(self):
        return True

    def supports_rename_tracking(self):
        return False

    def _read_index(self):
        self.index = Index(self.control_transport.local_abspath('index'))
        self._index_dirty = False

    def _get_submodule_index(self, relpath):
        if not isinstance(relpath, bytes):
            raise TypeError(relpath)
        try:
            info = self._submodule_info()[relpath]
        except KeyError:
            index_path = os.path.join(
                self.basedir, relpath.decode('utf-8'), '.git', 'index')
        else:
            index_path = self.control_transport.local_abspath(
                posixpath.join('modules', info[1], 'index'))
        return Index(index_path)
    def lock_read(self):
        """Lock the repository for read operations.

        :return: A breezy.lock.LogicalLockResult.
        """
        if not self._lock_mode:
            self._lock_mode = 'r'
            self._lock_count = 1
            self._read_index()
        else:
            self._lock_count += 1
        self.branch.lock_read()
        return lock.LogicalLockResult(self.unlock)
    def _lock_write_tree(self):
        if not self._lock_mode:
            self._lock_mode = 'w'
            self._lock_count = 1
            try:
                self._index_file = GitFile(
                    self.control_transport.local_abspath('index'), 'wb')
            except FileLocked:
                raise errors.LockContention('index')
            self._read_index()
        elif self._lock_mode == 'r':
            raise errors.ReadOnlyError(self)
        else:
            self._lock_count += 1
    def lock_tree_write(self):
        self.branch.lock_read()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def lock_write(self, token=None):
        self.branch.lock_write()
        try:
            self._lock_write_tree()
            return lock.LogicalLockResult(self.unlock)
        except BaseException:
            self.branch.unlock()
            raise

    def is_locked(self):
        return self._lock_count >= 1
    def get_physical_lock_status(self):
        return False

    def break_lock(self):
        try:
            self.control_transport.delete('index.lock')
        except errors.NoSuchFile:
            pass
        self.branch.break_lock()
    @only_raises(errors.LockNotHeld, errors.LockBroken)
    def unlock(self):
        if not self._lock_count:
            return lock.cant_unlock_not_held(self)
        self._lock_count -= 1
        if self._lock_count > 0:
            return
        if self._index_file is not None:
            if self._index_dirty:
                self._flush(self._index_file)
                self._index_file.close()
            else:
                # Something else already triggered a write of the index
                # file by calling .flush()
                self._index_file.abort()
            self._index_file = None
        self._lock_mode = None
        self.branch.unlock()
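
    # Illustrative sketch (not from the original module): the lock methods
    # above are normally used as context managers, mirroring how this file
    # itself calls them.  Assuming `wt` is an open GitWorkingTree:
    #
    #     with wt.lock_read():
    #         ...   # read the index; unlock() runs on exit
    #     with wt.lock_tree_write():
    #         ...   # index changes are flushed when the last write
    #               # lock is released in unlock()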
    def _detect_case_handling(self):
        try:
            self._transport.stat(".git/cOnFiG")
        except errors.NoSuchFile:
            self.case_sensitive = True
        else:
            self.case_sensitive = False

    def get_transform(self, pb=None):
        from ..transform import TreeTransform
        return TreeTransform(self, pb=pb)
    def merge_modified(self):
        return {}

    def set_merge_modified(self, modified_hashes):
        raise errors.UnsupportedOperation(self.set_merge_modified, self)

    def set_parent_trees(self, parents_list, allow_leftmost_as_ghost=False):
        self.set_parent_ids([p for p, t in parents_list])

    def _set_merges_from_parent_ids(self, rhs_parent_ids):
        try:
            merges = [self.branch.lookup_bzr_revision_id(
                revid)[0] for revid in rhs_parent_ids]
        except errors.NoSuchRevision as e:
            raise errors.GhostRevisionUnusableHere(e.revision)
        if merges:
            self.control_transport.put_bytes(
                'MERGE_HEAD', b'\n'.join(merges),
                mode=self.controldir._get_file_mode())
        else:
            try:
                self.control_transport.delete('MERGE_HEAD')
            except errors.NoSuchFile:
                pass
    def set_parent_ids(self, revision_ids, allow_leftmost_as_ghost=False):
        """Set the parent ids to revision_ids.

        See also set_parent_trees. This API will try to retrieve the tree
        data for each element of revision_ids from the tree's repository.
        If you have tree data already available, it is more efficient to use
        set_parent_trees rather than set_parent_ids. set_parent_ids is,
        however, an easier API to use.

        :param revision_ids: The revision_ids to set as the parent ids of this
            working tree. Any of these may be ghosts.
        """
        with self.lock_tree_write():
            self._check_parents_for_ghosts(
                revision_ids, allow_leftmost_as_ghost=allow_leftmost_as_ghost)
            for revision_id in revision_ids:
                _mod_revision.check_not_reserved_id(revision_id)

            revision_ids = self._filter_parent_ids_by_ancestry(revision_ids)

            if len(revision_ids) > 0:
                self.set_last_revision(revision_ids[0])
            else:
                self.set_last_revision(_mod_revision.NULL_REVISION)

            self._set_merges_from_parent_ids(revision_ids[1:])
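
    # Illustrative sketch (not from the original module): setting parents on
    # a Git-backed tree.  `revid_a` and `revid_b` are hypothetical Bazaar
    # revision ids; the first becomes the branch tip, the rest are recorded
    # as pending merges in .git/MERGE_HEAD by _set_merges_from_parent_ids().
    #
    #     with wt.lock_tree_write():
    #         wt.set_parent_ids([revid_a, revid_b])
    #     wt.get_parent_ids()   # -> [revid_a, revid_b]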
    def get_parent_ids(self):
        """See Tree.get_parent_ids.

        This implementation reads the pending merges list and last_revision
        value and uses that to decide what the parents list should be.
        """
        last_rev = _mod_revision.ensure_null(self._last_revision())
        if _mod_revision.NULL_REVISION == last_rev:
            parents = []
        else:
            parents = [last_rev]
        try:
            merges_bytes = self.control_transport.get_bytes('MERGE_HEAD')
        except errors.NoSuchFile:
            pass
        else:
            for l in osutils.split_lines(merges_bytes):
                revision_id = l.rstrip(b'\n')
                parents.append(
                    self.branch.lookup_foreign_revision_id(revision_id))
        return parents

    def check_state(self):
        """Check that the working state is/isn't valid."""
    def remove(self, files, verbose=False, to_file=None, keep_files=True,
               force=False):
        """Remove nominated files from the working tree metadata.

        :param files: File paths relative to the basedir.
        :param keep_files: If true, the files will also be kept.
        :param force: Delete files and directories, even if they are changed
            and even if the directories are not empty.
        """
        if not isinstance(files, list):
            files = [files]

        def backup(file_to_backup):
            abs_path = self.abspath(file_to_backup)
            backup_name = self.controldir._available_backup_name(
                file_to_backup)
            osutils.rename(abs_path, self.abspath(backup_name))
            return "removed %s (but kept a copy: %s)" % (
                file_to_backup, backup_name)

        # Sort needed to first handle directory content before the directory
        files_to_backup = []
        all_files = set()

        def recurse_directory_to_add_files(directory):
            # Recurse directory and add all files
            # so we can check if they have changed.
            for parent_info, file_infos in self.walkdirs(directory):
                for relpath, basename, kind, lstat, fileid, kind in file_infos:
                    # Is it versioned or ignored?
                    if self.is_versioned(relpath):
                        # Add nested content for deletion.
                        all_files.add(relpath)
                    else:
                        # Files which are not versioned
                        # should be treated as unknown.
                        files_to_backup.append(relpath)

        with self.lock_tree_write():
            for filepath in files:
                # Get file name into canonical form.
                abspath = self.abspath(filepath)
                filepath = self.relpath(abspath)
                all_files.add(filepath)
                recurse_directory_to_add_files(filepath)

            files = list(all_files)
            if len(files) == 0:
                return  # nothing to do

            # Sort needed to first handle directory content before the
            # directory
            files.sort(reverse=True)

            # Bail out if we are going to delete files we shouldn't
            if not keep_files and not force:
                for change in self.iter_changes(
                        self.basis_tree(), include_unchanged=True,
                        require_versioned=False, want_unversioned=True,
                        specific_files=files):
                    if change.versioned[0] is False:
                        # The record is unknown or newly added
                        files_to_backup.append(change.path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(change.path[1]))
                    elif (change.changed_content and (change.kind[1] is not None)
                          and osutils.is_inside_any(files, change.path[1])):
                        # Versioned and changed, but not deleted, and still
                        # in one of the dirs to be deleted.
                        files_to_backup.append(change.path[1])
                        files_to_backup.extend(
                            osutils.parent_directories(change.path[1]))

            for f in files:
                try:
                    kind = self.kind(f)
                except errors.NoSuchFile:
                    kind = None
                abs_path = self.abspath(f)
                if verbose:
                    # having removed it, it must be either ignored or unknown
                    if self.is_ignored(f):
                        new_status = 'I'
                    else:
                        new_status = '?'
                    kind_ch = osutils.kind_marker(kind)
                    to_file.write(new_status + ' ' + f + kind_ch + '\n')
                if kind is None:
                    message = "%s does not exist" % (f, )
                else:
                    if not keep_files:
                        if f in files_to_backup and not force:
                            message = backup(f)
                        else:
                            if kind == 'directory':
                                osutils.rmtree(abs_path)
                            else:
                                osutils.delete_any(abs_path)
                            message = "deleted %s" % (f,)
                    else:
                        message = "removed %s" % (f,)
                self._unversion_path(f)

                # print only one message (if any) per file.
                if message is not None:
                    trace.note(message)
            self._versioned_dirs = None
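
    # Illustrative sketch (not from the original module): removing files from
    # version control.  With keep_files=True the working files stay on disk;
    # with keep_files=False and force=False, changed or unversioned files are
    # backed up instead of deleted.
    #
    #     with wt.lock_tree_write():
    #         wt.remove(['docs/old.txt'], keep_files=True)
    #         wt.remove(['build'], keep_files=False, force=True)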
    def smart_add(self, file_list, recurse=True, action=None, save=True):
        if not file_list:
            file_list = [u'.']

        # expand any symlinks in the directory part, while leaving the
        # filename alone
        # only expanding if symlinks are supported avoids windows path bugs
        if self.supports_symlinks():
            file_list = list(map(osutils.normalizepath, file_list))

        conflicts_related = set()
        for c in self.conflicts():
            conflicts_related.update(c.associated_filenames())

        added = []
        ignored = {}
        user_dirs = []

        def call_action(filepath, kind):
            if filepath == '':
                return
            if action is not None:
                parent_path = posixpath.dirname(filepath)
                parent_id = self.path2id(parent_path)
                parent_ie = self._get_dir_ie(parent_path, parent_id)
                file_id = action(self, parent_ie, filepath, kind)
                if file_id is not None:
                    raise workingtree.SettingFileIdUnsupported()

        with self.lock_tree_write():
            for filepath in osutils.canonical_relpaths(
                    self.basedir, file_list):
                filepath, can_access = osutils.normalized_filename(filepath)
                if not can_access:
                    raise errors.InvalidNormalization(filepath)

                abspath = self.abspath(filepath)
                kind = osutils.file_kind(abspath)
                if kind in ("file", "symlink"):
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    if subpath in index:
                        # already present
                        continue
                    call_action(filepath, kind)
                    if save:
                        self._index_add_entry(filepath, kind)
                    added.append(filepath)
                elif kind == "directory":
                    (index, subpath) = self._lookup_index(
                        filepath.encode('utf-8'))
                    if subpath not in index:
                        call_action(filepath, kind)
                    if recurse:
                        user_dirs.append(filepath)
                else:
                    raise errors.BadFileKindError(filename=abspath, kind=kind)
            for user_dir in user_dirs:
                abs_user_dir = self.abspath(user_dir)
                try:
                    transport = _mod_transport.get_transport_from_path(
                        abs_user_dir)
                    _mod_controldir.ControlDirFormat.find_format(transport)
                    subtree = True
                except errors.NotBranchError:
                    subtree = False
                except errors.UnsupportedFormatError:
                    subtree = False
                if subtree:
                    trace.warning('skipping nested tree %r', abs_user_dir)
                    continue

                for name in os.listdir(abs_user_dir):
                    subp = os.path.join(user_dir, name)
                    if (self.is_control_filename(subp) or
                            self.mapping.is_special_file(subp)):
                        continue
                    ignore_glob = self.is_ignored(subp)
                    if ignore_glob is not None:
                        ignored.setdefault(ignore_glob, []).append(subp)
                        continue
                    abspath = self.abspath(subp)
                    kind = osutils.file_kind(abspath)
                    if kind == "directory":
                        user_dirs.append(subp)
                    else:
                        (index, subpath) = self._lookup_index(
                            subp.encode('utf-8'))
                        if subpath in index:
                            # already present
                            continue
                        if subp in conflicts_related:
                            continue
                        call_action(subp, kind)
                        if save:
                            self._index_add_entry(subp, kind)
                        added.append(subp)
            return added, ignored
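
    # Illustrative sketch (not from the original module): smart_add() walks
    # the given paths, skips control and ignored files, and stages the rest
    # in the Git index.  It returns the added paths plus a dict mapping each
    # matched ignore pattern to the paths it excluded.
    #
    #     added, ignored = wt.smart_add(['.'])
    #     for pattern, paths in ignored.items():
    #         print('ignored by %s: %s' % (pattern, paths))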
    def has_filename(self, filename):
        return osutils.lexists(self.abspath(filename))

    def _iter_files_recursive(self, from_dir=None, include_dirs=False,
                              recurse_nested=False):
        if from_dir is None:
            from_dir = u""
        if not isinstance(from_dir, str):
            raise TypeError(from_dir)
        encoded_from_dir = self.abspath(from_dir).encode(osutils._fs_enc)
        for (dirpath, dirnames, filenames) in os.walk(encoded_from_dir):
            dir_relpath = dirpath[len(self.basedir):].strip(b"/")
            if self.controldir.is_control_filename(
                    dir_relpath.decode(osutils._fs_enc)):
                continue
            for name in list(dirnames):
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                    continue
                relpath = os.path.join(dir_relpath, name)
                if not recurse_nested and self._directory_is_tree_reference(relpath.decode(osutils._fs_enc)):
                    dirnames.remove(name)
                if include_dirs:
                    try:
                        yield relpath.decode(osutils._fs_enc)
                    except UnicodeDecodeError:
                        raise errors.BadFilenameEncoding(
                            relpath, osutils._fs_enc)
                    if not self.is_versioned(relpath.decode(osutils._fs_enc)):
                        dirnames.remove(name)
            for name in filenames:
                if self.mapping.is_special_file(name):
                    continue
                if self.controldir.is_control_filename(
                        name.decode(osutils._fs_enc, 'replace')):
                    continue
                yp = os.path.join(dir_relpath, name)
                try:
                    yield yp.decode(osutils._fs_enc)
                except UnicodeDecodeError:
                    raise errors.BadFilenameEncoding(
                        yp, osutils._fs_enc)

    def extras(self):
        """Yield all unversioned files in this WorkingTree.
        """
        with self.lock_read():
            index_paths = set(
                [p.decode('utf-8') for p, i in self._recurse_index_entries()])
            all_paths = set(self._iter_files_recursive(include_dirs=False))
            return iter(all_paths - index_paths)
    def _gather_kinds(self, files, kinds):
        """See MutableTree._gather_kinds."""
        with self.lock_tree_write():
            for pos, f in enumerate(files):
                if kinds[pos] is None:
                    fullpath = osutils.normpath(self.abspath(f))
                    try:
                        kind = osutils.file_kind(fullpath)
                    except OSError as e:
                        if e.errno == errno.ENOENT:
                            raise errors.NoSuchFile(fullpath)
                    if f != '' and self._directory_is_tree_reference(f):
                        kind = 'tree-reference'
                    kinds[pos] = kind

    def flush(self):
        if self._lock_mode != 'w':
            raise errors.NotWriteLocked(self)
        # TODO(jelmer): This shouldn't be writing in-place, but index.lock is
        # already in use and GitFile doesn't allow overriding the lock file
        # name :(
        f = open(self.control_transport.local_abspath('index'), 'wb')
        # Note that _flush will close the file
        self._flush(f)

    def _flush(self, f):
        try:
            shaf = SHA1Writer(f)
            write_index_dict(shaf, self.index)
            shaf.close()
        except BaseException:
            f.abort()
            raise
        self._index_dirty = False
    def get_file_mtime(self, path):
        """See Tree.get_file_mtime."""
        try:
            return self._lstat(path).st_mtime
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise errors.NoSuchFile(path)
            raise

    def is_ignored(self, filename):
        r"""Check whether the filename matches an ignore pattern.

        If the file is ignored, returns the pattern which caused it to
        be ignored, otherwise None. So this can simply be used as a
        boolean if desired."""
        if getattr(self, '_global_ignoreglobster', None) is None:
            ignore_globs = set()
            ignore_globs.update(ignores.get_runtime_ignores())
            ignore_globs.update(ignores.get_user_ignores())
            self._global_ignoreglobster = globbing.ExceptionGlobster(
                ignore_globs)
        match = self._global_ignoreglobster.match(filename)
        if match is not None:
            return match
        try:
            if self.kind(filename) == 'directory':
                filename += '/'
        except errors.NoSuchFile:
            pass
        filename = filename.lstrip('/')
        ignore_manager = self._get_ignore_manager()
        ps = list(ignore_manager.find_matching(filename))
        if not ps:
            return None
        if not ps[-1].is_exclude:
            return None
        return bytes(ps[-1])
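
    # Illustrative sketch (not from the original module): is_ignored() answers
    # with the matching pattern, so it works both as a boolean and for
    # reporting.  Global breezy ignores are checked first, then .gitignore
    # rules via dulwich's IgnoreFilterManager.
    #
    #     pattern = wt.is_ignored('build/output.o')
    #     if pattern is not None:
    #         print('ignored by %r' % pattern)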
    def _get_ignore_manager(self):
        ignoremanager = getattr(self, '_ignoremanager', None)
        if ignoremanager is not None:
            return ignoremanager

        ignore_manager = IgnoreFilterManager.from_repo(self.repository._git)
        self._ignoremanager = ignore_manager
        return ignore_manager

    def _flush_ignore_list_cache(self):
        self._ignoremanager = None

    def set_last_revision(self, revid):
        if _mod_revision.is_null(revid):
            self.branch.set_last_revision_info(0, revid)
            return
        _mod_revision.check_not_reserved_id(revid)
        try:
            self.branch.generate_revision_history(revid)
        except errors.NoSuchRevision:
            raise errors.GhostRevisionUnusableHere(revid)

    def _reset_data(self):
        pass

    def get_file_verifier(self, path, stat_value=None):
        with self.lock_read():
            (index, subpath) = self._lookup_index(path.encode('utf-8'))
            try:
                return ("GIT", index[subpath].sha)
            except KeyError:
                if self._has_dir(path):
                    return ("GIT", None)
                raise errors.NoSuchFile(path)
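
    # Illustrative sketch (not from the original module): the verifier for a
    # versioned file is the Git blob id already stored in the index, so the
    # file content does not need to be re-hashed.
    #
    #     kind, sha = wt.get_file_verifier('README')   # -> ("GIT", b'...')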
    def get_file_sha1(self, path, stat_value=None):
        with self.lock_read():
            if not self.is_versioned(path):
                raise errors.NoSuchFile(path)
            abspath = self.abspath(path)
            try:
                return osutils.sha_file_by_name(abspath)
            except OSError as e:
                if e.errno in (errno.EISDIR, errno.ENOENT):
                    return None
                raise

    def revision_tree(self, revid):
        return self.repository.revision_tree(revid)

    def _is_executable_from_path_and_stat_from_stat(self, path, stat_result):
        mode = stat_result.st_mode
        return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat_from_basis(self, path, stat_result):
        return self.basis_tree().is_executable(path)

    def stored_kind(self, path):
        with self.lock_read():
            encoded_path = path.encode('utf-8')
            (index, subpath) = self._lookup_index(encoded_path)
            try:
                return mode_kind(index[subpath].mode)
            except KeyError:
                # Maybe it's a directory?
                if self._has_dir(encoded_path):
                    return "directory"
                raise errors.NoSuchFile(path)

    def _lstat(self, path):
        return os.lstat(self.abspath(path))

    def _live_entry(self, path):
        encoded_path = self.abspath(path.decode('utf-8')).encode(
            osutils._fs_enc)
        return index_entry_from_path(encoded_path)

    def is_executable(self, path):
        with self.lock_read():
            if self._supports_executable():
                mode = self._lstat(path).st_mode
            else:
                (index, subpath) = self._lookup_index(path.encode('utf-8'))
                try:
                    mode = index[subpath].mode
                except KeyError:
                    mode = 0
            return bool(stat.S_ISREG(mode) and stat.S_IEXEC & mode)

    def _is_executable_from_path_and_stat(self, path, stat_result):
        if self._supports_executable():
            return self._is_executable_from_path_and_stat_from_stat(path, stat_result)
        else:
            return self._is_executable_from_path_and_stat_from_basis(
                path, stat_result)
    def list_files(self, include_root=False, from_dir=None, recursive=True,
                   recurse_nested=False):
        if from_dir is None or from_dir == '.':
            from_dir = u""
        dir_ids = {}
        fk_entries = {'directory': tree.TreeDirectory,
                      'file': tree.TreeFile,
                      'symlink': tree.TreeLink,
                      'tree-reference': tree.TreeReference}
        with self.lock_read():
            root_ie = self._get_dir_ie(u"", None)
            if include_root and not from_dir:
                yield "", "V", root_ie.kind, root_ie
            dir_ids[u""] = root_ie.file_id
            if recursive:
                path_iterator = sorted(
                    self._iter_files_recursive(
                        from_dir, include_dirs=True,
                        recurse_nested=recurse_nested))
            else:
                encoded_from_dir = self.abspath(from_dir).encode(
                    osutils._fs_enc)
                path_iterator = sorted(
                    [os.path.join(from_dir, name.decode(osutils._fs_enc))
                     for name in os.listdir(encoded_from_dir)
                     if not self.controldir.is_control_filename(
                         name.decode(osutils._fs_enc)) and
                     not self.mapping.is_special_file(
                         name.decode(osutils._fs_enc))])
            for path in path_iterator:
                try:
                    encoded_path = path.encode("utf-8")
                except UnicodeEncodeError:
                    raise errors.BadFilenameEncoding(
                        path, osutils._fs_enc)
                (index, index_path) = self._lookup_index(encoded_path)
                try:
                    value = index[index_path]
                except KeyError:
                    value = None
                kind = self.kind(path)
                parent, name = posixpath.split(path)
                for dir_path, dir_ie in self._add_missing_parent_ids(
                        parent, dir_ids):
                    pass
                if kind == 'tree-reference' and recurse_nested:
                    ie = self._get_dir_ie(path, self.path2id(path))
                    yield (posixpath.relpath(path, from_dir), 'V', 'directory',
                           ie)
                    continue
                if kind == 'directory':
                    if path != from_dir:
                        if self._has_dir(encoded_path):
                            ie = self._get_dir_ie(path, self.path2id(path))
                            status = "V"
                        elif self.is_ignored(path):
                            status = "I"
                            ie = fk_entries[kind]()
                        else:
                            status = "?"
                            ie = fk_entries[kind]()
                        yield (posixpath.relpath(path, from_dir), status, kind,
                               ie)
                    continue
                if value is not None:
                    ie = self._get_file_ie(name, path, value, dir_ids[parent])
                    yield (posixpath.relpath(path, from_dir), "V", ie.kind, ie)
                else:
                    ie = fk_entries[kind]()
                    yield (posixpath.relpath(path, from_dir),
                           ("I" if self.is_ignored(path) else "?"), kind, ie)
    def all_file_ids(self):
        raise errors.UnsupportedOperation(self.all_file_ids, self)

    def all_versioned_paths(self):
        with self.lock_read():
            paths = set()
            for path in self.index:
                if self.mapping.is_special_file(path):
                    continue
                path = path.decode("utf-8")
                paths.add(path)
                while path != "":
                    path = posixpath.dirname(path).strip("/")
                    if path in paths:
                        break
                    paths.add(path)
            return paths

    def iter_child_entries(self, path):
        encoded_path = path.encode('utf-8')
        with self.lock_read():
            parent_id = self.path2id(path)
            found_any = False
            for item_path, value in self.index.iteritems():
                decoded_item_path = item_path.decode('utf-8')
                if self.mapping.is_special_file(item_path):
                    continue
                if not osutils.is_inside(path, decoded_item_path):
                    continue
                found_any = True
                subpath = posixpath.relpath(decoded_item_path, path)
                if '/' in subpath:
                    dirname = subpath.split('/', 1)[0]
                    file_ie = self._get_dir_ie(
                        posixpath.join(path, dirname), parent_id)
                else:
                    (unused_parent, name) = posixpath.split(decoded_item_path)
                    file_ie = self._get_file_ie(
                        name, decoded_item_path, value, parent_id)
                yield file_ie
            if not found_any and path != u'':
                raise errors.NoSuchFile(path)
    def conflicts(self):
        with self.lock_read():
            conflicts = _mod_conflicts.ConflictList()
            for item_path, value in self.index.iteritems():
                if value.flags & FLAG_STAGEMASK:
                    conflicts.append(_mod_conflicts.TextConflict(
                        item_path.decode('utf-8')))
            return conflicts

    def set_conflicts(self, conflicts):
        by_path = set()
        for conflict in conflicts:
            if conflict.typestring in ('text conflict', 'contents conflict'):
                by_path.add(conflict.path.encode('utf-8'))
            else:
                raise errors.UnsupportedOperation(self.set_conflicts, self)
        with self.lock_tree_write():
            for path in self.index:
                self._set_conflicted(path, path in by_path)

    def _set_conflicted(self, path, conflicted):
        trace.mutter('change conflict: %r -> %r', path, conflicted)
        value = self.index[path]
        self._index_dirty = True
        if conflicted:
            self.index[path] = (value[:9] + (value[9] | FLAG_STAGEMASK, ))
        else:
            self.index[path] = (value[:9] + (value[9] & ~ FLAG_STAGEMASK, ))

    def add_conflicts(self, new_conflicts):
        with self.lock_tree_write():
            for conflict in new_conflicts:
                if conflict.typestring in ('text conflict',
                                           'contents conflict'):
                    try:
                        self._set_conflicted(
                            conflict.path.encode('utf-8'), True)
                    except KeyError:
                        raise errors.UnsupportedOperation(
                            self.add_conflicts, self)
                else:
                    raise errors.UnsupportedOperation(self.add_conflicts, self)
    def walkdirs(self, prefix=""):
        """Walk the directories of this tree.

        Returns a generator which yields items in the form:
        ((current_directory_path, fileid),
         [(file1_path, file1_name, file1_kind, (lstat), file1_id,
           file1_kind), ...])

        This API returns a generator, which is only valid during the current
        tree transaction - within a single lock_read or lock_write duration.

        If the tree is not locked, it may cause an error to be raised,
        depending on the tree implementation.
        """
        from bisect import bisect_left
        disk_top = self.abspath(prefix)
        if disk_top.endswith('/'):
            disk_top = disk_top[:-1]
        top_strip_len = len(disk_top) + 1
        inventory_iterator = self._walkdirs(prefix)
        disk_iterator = osutils.walkdirs(disk_top, prefix)
        try:
            current_disk = next(disk_iterator)
            disk_finished = False
        except OSError as e:
            if not (e.errno == errno.ENOENT
                    or (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)):
                raise
            current_disk = None
            disk_finished = True
        try:
            current_inv = next(inventory_iterator)
            inv_finished = False
        except StopIteration:
            current_inv = None
            inv_finished = True
        while not inv_finished or not disk_finished:
            if current_disk:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = current_disk
            else:
                ((cur_disk_dir_relpath, cur_disk_dir_path_from_top),
                    cur_disk_dir_content) = ((None, None), None)
            if not disk_finished:
                # strip out .bzr dirs
                if (cur_disk_dir_path_from_top[top_strip_len:] == ''
                        and len(cur_disk_dir_content) > 0):
                    # osutils.walkdirs can be made nicer -
                    # yield the path-from-prefix rather than the pathjoined
                    # value.
                    bzrdir_loc = bisect_left(cur_disk_dir_content,
                                             ('.git', '.git'))
                    if (bzrdir_loc < len(cur_disk_dir_content) and
                        self.controldir.is_control_filename(
                            cur_disk_dir_content[bzrdir_loc][0])):
                        # we dont yield the contents of, or, .bzr itself.
                        del cur_disk_dir_content[bzrdir_loc]
            if inv_finished:
                # everything is unknown
                direction = 1
            elif disk_finished:
                # everything is missing
                direction = -1
            else:
                direction = ((current_inv[0][0] > cur_disk_dir_relpath)
                             - (current_inv[0][0] < cur_disk_dir_relpath))
            if direction > 0:
                # disk is before inventory - unknown
                dirblock = [(relpath, basename, kind, stat, None, None) for
                            relpath, basename, kind, stat, top_path in
                            cur_disk_dir_content]
                yield (cur_disk_dir_relpath, None), dirblock
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
            elif direction < 0:
                # inventory is before disk - missing.
                dirblock = [(relpath, basename, 'unknown', None, fileid, kind)
                            for relpath, basename, dkind, stat, fileid, kind in
                            current_inv[1]]
                yield (current_inv[0][0], current_inv[0][1]), dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
            else:
                # versioned present directory
                # merge the inventory and disk data together
                dirblock = []
                for relpath, subiterator in itertools.groupby(sorted(
                        current_inv[1] + cur_disk_dir_content,
                        key=operator.itemgetter(0)), operator.itemgetter(1)):
                    path_elements = list(subiterator)
                    if len(path_elements) == 2:
                        inv_row, disk_row = path_elements
                        # versioned, present file
                        dirblock.append((inv_row[0],
                                         inv_row[1], disk_row[2],
                                         disk_row[3], inv_row[4],
                                         inv_row[5]))
                    elif len(path_elements[0]) == 5:
                        # unknown disk file
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                                path_elements[0][2], path_elements[0][3],
                                None, None))
                    elif len(path_elements[0]) == 6:
                        # versioned, absent file.
                        dirblock.append(
                            (path_elements[0][0], path_elements[0][1],
                                'unknown', None, path_elements[0][4],
                                path_elements[0][5]))
                    else:
                        raise NotImplementedError('unreachable code')
                yield current_inv[0], dirblock
                try:
                    current_inv = next(inventory_iterator)
                except StopIteration:
                    inv_finished = True
                try:
                    current_disk = next(disk_iterator)
                except StopIteration:
                    disk_finished = True
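
    # Illustrative sketch (not from the original module): walkdirs() merges
    # the index view with what is actually on disk, one directory block at a
    # time, while a lock is held.
    #
    #     with wt.lock_read():
    #         for (dir_relpath, dir_id), entries in wt.walkdirs(''):
    #             for relpath, name, kind, lstat, file_id, stored_kind in entries:
    #                 print(relpath, kind, stored_kind)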
    def _walkdirs(self, prefix=u""):
        prefix = prefix.encode('utf-8')
        per_dir = defaultdict(set)
        if prefix == b"":
            per_dir[(u'', self.path2id(''))] = set()

        def add_entry(path, kind):
            if path == b'' or not path.startswith(prefix):
                return
            (dirname, child_name) = posixpath.split(path)
            add_entry(dirname, 'directory')
            dirname = dirname.decode("utf-8")
            dir_file_id = self.path2id(dirname)
            if not isinstance(value, tuple) or len(value) != 10:
                raise ValueError(value)
            per_dir[(dirname, dir_file_id)].add(
                (path.decode("utf-8"), child_name.decode("utf-8"),
                 kind,
                 self.path2id(path.decode("utf-8")),
                 kind))
        with self.lock_read():
            for path, value in self.index.iteritems():
                if self.mapping.is_special_file(path):
                    continue
                if not path.startswith(prefix):
                    continue
                add_entry(path, mode_kind(value.mode))
        return ((k, sorted(v)) for (k, v) in sorted(per_dir.items()))

    def get_shelf_manager(self):
        raise workingtree.ShelvingUnsupported()

    def store_uncommitted(self):
        raise errors.StoringUncommittedNotSupported(self)
    def apply_inventory_delta(self, changes):
        for (old_path, new_path, file_id, ie) in changes:
            if old_path is not None:
                (index, old_subpath) = self._lookup_index(
                    old_path.encode('utf-8'))
                try:
                    self._index_del_entry(index, old_subpath)
                except KeyError:
                    pass
                else:
                    self._versioned_dirs = None
            if new_path is not None and ie.kind != 'directory':
                if ie.kind == 'tree-reference':
                    self._index_add_entry(
                        new_path, ie.kind,
                        reference_revision=ie.reference_revision)
                else:
                    self._index_add_entry(new_path, ie.kind)
    def annotate_iter(self, path,
                      default_revision=_mod_revision.CURRENT_REVISION):
        """See Tree.annotate_iter

        This implementation will use the basis tree implementation if possible.
        Lines not in the basis are attributed to CURRENT_REVISION.

        If there are pending merges, lines added by those merges will be
        incorrectly attributed to CURRENT_REVISION (but after committing, the
        attribution will be correct).
        """
        with self.lock_read():
            maybe_file_parent_keys = []
            for parent_id in self.get_parent_ids():
                try:
                    parent_tree = self.revision_tree(parent_id)
                except errors.NoSuchRevisionInTree:
                    parent_tree = self.branch.repository.revision_tree(
                        parent_id)
                with parent_tree.lock_read():
                    # TODO(jelmer): Use rename/copy tracker to find path name
                    # in parent
                    parent_path = path
                    try:
                        kind = parent_tree.kind(parent_path)
                    except errors.NoSuchFile:
                        continue
                    if kind != 'file':
                        # Note: this is slightly unnecessary, because symlinks
                        # and directories have a "text" which is the empty
                        # text, and we know that won't mess up annotations. But
                        # it seems cleaner
                        continue
                    parent_text_key = (
                        parent_path,
                        parent_tree.get_file_revision(parent_path))
                    if parent_text_key not in maybe_file_parent_keys:
                        maybe_file_parent_keys.append(parent_text_key)
            # Now we have the parents of this content
            from breezy.annotate import Annotator
            from .annotate import AnnotateProvider
            annotate_provider = AnnotateProvider(
                self.branch.repository._file_change_scanner)
            annotator = Annotator(annotate_provider)

            from breezy.graph import Graph
            graph = Graph(annotate_provider)
            heads = graph.heads(maybe_file_parent_keys)
            file_parent_keys = []
            for key in maybe_file_parent_keys:
                if key in heads:
                    file_parent_keys.append(key)

            text = self.get_file_text(path)
            this_key = (path, default_revision)
            annotator.add_special_text(this_key, file_parent_keys, text)
            annotations = [(key[-1], line)
                           for key, line in annotator.annotate_flat(this_key)]
            return annotations
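
    # Illustrative sketch (not from the original module): annotating a file in
    # the working tree.  Uncommitted lines are attributed to the placeholder
    # CURRENT_REVISION, as described in the docstring above.
    #
    #     with wt.lock_read():
    #         for revid, line in wt.annotate_iter('setup.py'):
    #             print(revid, line.rstrip())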
    def _rename_one(self, from_rel, to_rel):
        os.rename(self.abspath(from_rel), self.abspath(to_rel))

    def _build_checkout_with_index(self):
        build_index_from_tree(
            self.user_transport.local_abspath('.'),
            self.control_transport.local_abspath("index"),
            self.store,
            None
            if self.branch.head is None
            else self.store[self.branch.head].tree,
            honor_filemode=self._supports_executable())

    def reset_state(self, revision_ids=None):
        """Reset the state of the working tree.

        This does a hard-reset to a last-known-good state. This is a way to
        fix a tree if something got corrupted (like the .git/index file).
        """
        with self.lock_tree_write():
            if revision_ids is not None:
                self.set_parent_ids(revision_ids)
            self.index.clear()
            self._index_dirty = True
            if self.branch.head is not None:
                for entry in self.store.iter_tree_contents(
                        self.store[self.branch.head].tree):
                    if not validate_path(entry.path):
                        continue

                    if S_ISGITLINK(entry.mode):
                        pass  # TODO(jelmer): record and return submodule paths
                    else:
                        # Let's at least try to use the working tree file:
                        try:
                            st = self._lstat(self.abspath(
                                entry.path.decode('utf-8')))
                        except OSError:
                            # But if it doesn't exist, we'll make something up.
                            obj = self.store[entry.sha]
                            st = os.stat_result((entry.mode, 0, 0, 0,
                                                 0, 0, len(
                                                     obj.as_raw_string()), 0,
                                                 0, 0))
                    (index, subpath) = self._lookup_index(entry.path)
                    index[subpath] = index_entry_from_stat(st, entry.sha, 0)
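
    # Illustrative sketch (not from the original module): rebuilding a corrupt
    # .git/index from the current branch head.
    #
    #     wt.reset_state()          # re-populate the index from HEAD
    #     # or, with explicit parents (revid is a hypothetical revision id):
    #     #     wt.reset_state(revision_ids=[revid])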
    def _update_git_tree(self, old_revision, new_revision, change_reporter=None,
                         show_base=False):
        basis_tree = self.revision_tree(old_revision)
        if new_revision != old_revision:
            with basis_tree.lock_read():
                new_basis_tree = self.branch.basis_tree()
                merge.merge_inner(
                    self.branch,
                    new_basis_tree,
                    basis_tree,
                    this_tree=self,
                    change_reporter=change_reporter,
                    show_base=show_base)

    def pull(self, source, overwrite=False, stop_revision=None,
             change_reporter=None, possible_transports=None, local=False,
             show_base=False):
        with self.lock_write(), source.lock_read():
            old_revision = self.branch.last_revision()
            count = self.branch.pull(source, overwrite, stop_revision,
                                     possible_transports=possible_transports,
                                     local=local)
            self._update_git_tree(
                old_revision=old_revision,
                new_revision=self.branch.last_revision(),
                change_reporter=change_reporter,
                show_base=show_base)
            return count
    def add_reference(self, sub_tree):
        """Add a TreeReference to the tree, pointing at sub_tree.

        :param sub_tree: subtree to add.
        """
        with self.lock_tree_write():
            try:
                sub_tree_path = self.relpath(sub_tree.basedir)
            except errors.PathNotChild:
                raise BadReferenceTarget(
                    self, sub_tree, 'Target not inside tree.')

            self._add([sub_tree_path], [None], ['tree-reference'])

    def _read_submodule_head(self, path):
        return read_submodule_head(self.abspath(path))

    def get_reference_revision(self, path, branch=None):
        hexsha = self._read_submodule_head(path)
        if hexsha is None:
            return _mod_revision.NULL_REVISION
        return self.branch.lookup_foreign_revision_id(hexsha)

    def get_nested_tree(self, path):
        return workingtree.WorkingTree.open(self.abspath(path))

    def _directory_is_tree_reference(self, relpath):
        # as a special case, if a directory contains control files then
        # it's a tree reference, except that the root of the tree is not
        return relpath and osutils.lexists(self.abspath(relpath) + u"/.git")
    def extract(self, sub_path, format=None):
        """Extract a subtree from this tree.

        A new branch will be created, relative to the path for this tree.
        """
        def mkdirs(path):
            segments = osutils.splitpath(path)
            transport = self.branch.controldir.root_transport
            for name in segments:
                transport = transport.clone(name)
                transport.ensure_base()
            return transport

        with self.lock_tree_write():
            self.flush()
            branch_transport = mkdirs(sub_path)
            if format is None:
                format = self.controldir.cloning_metadir()
            branch_transport.ensure_base()
            branch_bzrdir = format.initialize_on_transport(branch_transport)
            try:
                repo = branch_bzrdir.find_repository()
            except errors.NoRepositoryPresent:
                repo = branch_bzrdir.create_repository()
            if not repo.supports_rich_root():
                raise errors.RootNotRich()
            new_branch = branch_bzrdir.create_branch()
            new_branch.pull(self.branch)
            for parent_id in self.get_parent_ids():
                new_branch.fetch(self.branch, parent_id)
            tree_transport = self.controldir.root_transport.clone(sub_path)
            if tree_transport.base != branch_transport.base:
                tree_bzrdir = format.initialize_on_transport(tree_transport)
                tree_bzrdir.set_branch_reference(new_branch)
            else:
                tree_bzrdir = branch_bzrdir
            wt = tree_bzrdir.create_workingtree(_mod_revision.NULL_REVISION)
            wt.set_parent_ids(self.get_parent_ids())
            return wt
    def _get_check_refs(self):
        """Return the references needed to perform a check of this tree.

        The default implementation returns no refs, and is only suitable for
        trees that have no local caching and can commit on ghosts at any time.

        :seealso: breezy.check for details about check_refs.
        """
        return []

    def copy_content_into(self, tree, revision_id=None):
        """Copy the current content and user files of this tree into tree."""
        with self.lock_read():
            if revision_id is None:
                merge.transform_tree(tree, self)
            else:
                # TODO now merge from tree.last_revision to revision (to
                # preserve user local changes)
                try:
                    other_tree = self.revision_tree(revision_id)
                except errors.NoSuchRevision:
                    other_tree = self.branch.repository.revision_tree(
                        revision_id)

                merge.transform_tree(tree, other_tree)
                if revision_id == _mod_revision.NULL_REVISION:
                    new_parents = []
                else:
                    new_parents = [revision_id]
                tree.set_parent_ids(new_parents)
    def reference_parent(self, path, possible_transports=None):
        remote_url = self.get_reference_info(path)
        if remote_url is None:
            trace.warning("Unable to find submodule info for %s", path)
            return None
        return _mod_branch.Branch.open(remote_url, possible_transports=possible_transports)

    def get_reference_info(self, path):
        submodule_info = self._submodule_info()
        info = submodule_info.get(path.encode('utf-8'))
        if info is None:
            return None
        return info[0].decode('utf-8')

    def set_reference_info(self, tree_path, branch_location):
        path = self.abspath('.gitmodules')
        try:
            config = GitConfigFile.from_path(path)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                config = GitConfigFile()
            else:
                raise
        section = (b'submodule', tree_path.encode('utf-8'))
        if branch_location is None:
            try:
                del config[section]
            except KeyError:
                pass
        else:
            branch_location = urlutils.join(
                urlutils.strip_segment_parameters(self.branch.user_url),
                branch_location)
            config.set(
                section,
                b'path', tree_path.encode('utf-8'))
            config.set(
                section,
                b'url', branch_location.encode('utf-8'))
        config.write_to_path(path)
        self.add('.gitmodules')
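
    # Illustrative sketch (not from the original module): submodule reference
    # info is kept in .gitmodules.  The path and URL below are hypothetical.
    #
    #     wt.set_reference_info('vendor/lib', 'https://example.com/lib.git')
    #     wt.get_reference_info('vendor/lib')       # -> the stored URL
    #     wt.set_reference_info('vendor/lib', None)  # drop the entry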


class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):

    _tree_class = GitWorkingTree

    supports_versioned_directories = False

    supports_setting_file_ids = False

    supports_store_uncommitted = False

    supports_leftmost_parent_id_as_ghost = False

    supports_righthand_parent_id_as_ghost = False

    requires_normalized_unicode_filenames = True

    supports_merge_modified = False

    ignore_filename = ".gitignore"

    @property
    def _matchingcontroldir(self):
        from .dir import LocalGitControlDirFormat
        return LocalGitControlDirFormat()

    def get_format_description(self):
        return "Git Working Tree"

    def initialize(self, a_controldir, revision_id=None, from_branch=None,
                   accelerator_tree=None, hardlink=False):
        """See WorkingTreeFormat.initialize()."""
        if not isinstance(a_controldir, LocalGitDir):
            raise errors.IncompatibleFormat(self, a_controldir)
        branch = a_controldir.open_branch(nascent_ok=True)
        if revision_id is not None:
            branch.set_last_revision(revision_id)
        wt = GitWorkingTree(
            a_controldir, a_controldir.open_repository(), branch)
        for hook in MutableTree.hooks['post_build_tree']:
            hook(wt)
        return wt