    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked:
                raise LockContention(name)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)
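
    # Usage sketch (hypothetical caller): LogicalLockResult simply wraps the
    # unlock callable, so a typical pattern is:
    #
    #   lock = refs.lock_ref(b"refs/heads/master")
    #   try:
    #       ...  # update the ref while holding the lock
    #   finally:
    #       lock.unlock()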


# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")
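
# Example (a sketch): a worktree's .git file contains a single line such as
# b"gitdir: /path/to/repo/.git\n"; read_gitfile returns the path with the
# trailing newline stripped:
#
#   with open('.git', 'rb') as f:
#       gitdir = read_gitfile(f)   # -> b'/path/to/repo/.git'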


class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        # ... (transport and object store setup elided in this excerpt) ...
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
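            # refs_text, when given, is the raw info/refs listing served by
            # a dumb server: one b"<40-hex-sha>\t<refname>\n" line per ref
            # (format sketch; InfoRefsContainer parses it).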
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')


class TransportObjectStore(PackBasedObjectStore):
    # ... (constructor and earlier methods elided in this excerpt) ...

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l.startswith(b"#"):
                    continue
                if os.path.isabs(l):
                    ret.append(l)
                else:
                    ret.append(self.transport.abspath(l))
            return ret
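
    # info/alternates lists one additional object directory per line; lines
    # starting with '#' are comments. A sketch of the expected contents:
    #
    #   /srv/git/shared.git/objects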

    @property
    def packs(self):
        # FIXME: Never invalidates.
        if not self._pack_cache:
            self._update_pack_cache()
        return self._pack_cache.values()

    def _update_pack_cache(self):
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs
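
    # Note on the reconciliation above: packs that vanished from disk
    # (cached but no longer in pack_files) are closed and evicted, while the
    # loop admits any packs that newly appeared.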

    def _pack_names(self):
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was
                    # not yet fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                warning('No info/packs on remote host; '
                        'run \'git update-server-info\' on remote.')
                return []
            return [
                os.path.splitext(name)[0]
                for name in read_packs_file(f)]
        except NoSuchFile:
            pass
        return pack_files
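
    # info/packs (written by 'git update-server-info') names one pack per
    # line, which read_packs_file parses. A sketch of the format:
    #
    #   P pack-6e1d348b9b07a31388674a59287a1a7f9bb90af2.pack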

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        del self._pack_cache[os.path.basename(pack._basename)]

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])
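
    # e.g. _split_loose_object(b"3f786850e387550fdab836ed7e6dc881de23001b")
    # returns (b"3f", b"786850e387550fdab836ed7e6dc881de23001b"), matching
    # git's objects/3f/786850... on-disk layout.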

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def move_in_pack(self, f):
        # ... (docstring elided in this excerpt) ...
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack
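
    # Sketch of the intended flow: add_pack() (below) hands callers a buffer
    # to write pack data into, and its commit callback passes the finished
    # buffer to move_in_pack(), which stores pack-<sha>.pack and writes a
    # matching .idx beside it.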

    def move_in_thin_pack(self, f):
        # ... (docstring elided in this excerpt) ...
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)
        # TODO(jelmer): Just add new pack to the cache
        self._flush_pack_cache()
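
    # A thin pack (as received over the wire) may delta against objects it
    # does not itself contain; resolve_ext_ref=self.get_raw lets Pack resolve
    # those from this store, so write_pack_objects can re-serialise a
    # self-contained pack before the index is written.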

    def add_pack(self):
        """Add a new pack to this object store.