/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/git/transportgit.py

  • Committer: Gustav Hartvigsson
  • Date: 2021-01-09 21:36:27 UTC
  • Revision ID: gustav.hartvigsson@gmail.com-20210109213627-h1xwcutzy9m7a99b
Added 'Case Preserving Working Tree Use Cases' from Canonical Wiki

* Added a page from the Canonical Bazaar wiki
  with information on the semantics of case
  preserving filesystems and how a case insensitive
  filesystem works.
  
  * Needs re-work, but this will do as it is the
    same information as what was on the linked
    page in the current documentation.

Show diffs side-by-side

added added

removed removed

Lines of Context:
16
16
 
17
17
"""A Git repository implementation that uses a Bazaar transport."""
18
18
 
19
 
from __future__ import absolute_import
20
 
 
21
19
from io import BytesIO
22
20
 
23
21
import os
36
34
from dulwich.object_store import (
37
35
    PackBasedObjectStore,
38
36
    PACKDIR,
 
37
    read_packs_file,
39
38
    )
40
39
from dulwich.pack import (
41
40
    MemoryPackIndex,
79
78
    )
80
79
 
81
80
from ..lock import LogicalLockResult
 
81
from ..trace import warning
82
82
 
83
83
 
84
84
class TransportRefsContainer(RefsContainer):
210
210
        except NoSuchFile:
211
211
            return None
212
212
        with f:
213
 
            header = f.read(len(SYMREF))
 
213
            try:
 
214
                header = f.read(len(SYMREF))
 
215
            except ReadError:
 
216
                # probably a directory
 
217
                return None
214
218
            if header == SYMREF:
215
219
                # Read only the first line
216
220
                return header + next(iter(f)).rstrip(b"\r\n")
232
236
        del self._packed_refs[name]
233
237
        if name in self._peeled_refs:
234
238
            del self._peeled_refs[name]
235
 
        f = self.transport.open_write_stream("packed-refs")
236
 
        try:
 
239
        with self.transport.open_write_stream("packed-refs") as f:
237
240
            write_packed_refs(f, self._packed_refs, self._peeled_refs)
238
 
        finally:
239
 
            f.close()
240
241
 
241
242
    def set_symbolic_ref(self, name, other):
242
243
        """Make a ref point at another ref.
423
424
                    _mod_transport.get_transport_from_path(commondir)
424
425
        else:
425
426
            self._commontransport = self._controltransport
426
 
        object_store = TransportObjectStore(
427
 
            self._commontransport.clone(OBJECTDIR))
 
427
        config = self.get_config()
 
428
        object_store = TransportObjectStore.from_config(
 
429
            self._commontransport.clone(OBJECTDIR),
 
430
            config)
428
431
        if refs_text is not None:
429
432
            refs_container = InfoRefsContainer(BytesIO(refs_text))
430
433
            try:
431
434
                head = TransportRefsContainer(
432
 
                    self._commontransport).read_loose_ref("HEAD")
 
435
                    self._commontransport).read_loose_ref(b"HEAD")
433
436
            except KeyError:
434
437
                pass
435
438
            else:
436
 
                refs_container._refs["HEAD"] = head
 
439
                refs_container._refs[b"HEAD"] = head
437
440
        else:
438
441
            refs_container = TransportRefsContainer(
439
442
                self._commontransport, self._controltransport)
511
514
        backends.extend(StackedConfig.default_backends())
512
515
        return StackedConfig(backends, writable=writable)
513
516
 
 
517
    # Here for compatibility with dulwich < 0.19.17
 
518
    def generate_pack_data(self, have, want, progress=None, ofs_delta=None):
 
519
        """Generate pack data objects for a set of wants/haves.
 
520
 
 
521
        Args:
 
522
          have: List of SHA1s of objects that should not be sent
 
523
          want: List of SHA1s of objects that should be sent
 
524
          ofs_delta: Whether OFS deltas can be included
 
525
          progress: Optional progress reporting method
 
526
        """
 
527
        shallow = self.get_shallow()
 
528
        if shallow:
 
529
            return self.object_store.generate_pack_data(
 
530
                have, want, shallow=shallow,
 
531
                progress=progress, ofs_delta=ofs_delta)
 
532
        else:
 
533
            return self.object_store.generate_pack_data(
 
534
                have, want, progress=progress, ofs_delta=ofs_delta)
 
535
 
514
536
    def __repr__(self):
515
537
        return "<%s for %r>" % (self.__class__.__name__, self.transport)
516
538
 
543
565
class TransportObjectStore(PackBasedObjectStore):
544
566
    """Git-style object store that exists on disk."""
545
567
 
546
 
    def __init__(self, transport):
 
568
    def __init__(self, transport,
 
569
                 loose_compression_level=-1, pack_compression_level=-1):
547
570
        """Open an object store.
548
571
 
549
572
        :param transport: Transport to open data from
550
573
        """
551
574
        super(TransportObjectStore, self).__init__()
 
575
        self.pack_compression_level = pack_compression_level
 
576
        self.loose_compression_level = loose_compression_level
552
577
        self.transport = transport
553
578
        self.pack_transport = self.transport.clone(PACKDIR)
554
579
        self._alternates = None
555
580
 
 
581
    @classmethod
 
582
    def from_config(cls, path, config):
 
583
        try:
 
584
            default_compression_level = int(config.get(
 
585
                (b'core', ), b'compression').decode())
 
586
        except KeyError:
 
587
            default_compression_level = -1
 
588
        try:
 
589
            loose_compression_level = int(config.get(
 
590
                (b'core', ), b'looseCompression').decode())
 
591
        except KeyError:
 
592
            loose_compression_level = default_compression_level
 
593
        try:
 
594
            pack_compression_level = int(config.get(
 
595
                (b'core', ), 'packCompression').decode())
 
596
        except KeyError:
 
597
            pack_compression_level = default_compression_level
 
598
        return cls(path, loose_compression_level, pack_compression_level)
 
599
 
556
600
    def __eq__(self, other):
557
601
        if not isinstance(other, TransportObjectStore):
558
602
            return False
587
631
                ret.append(l)
588
632
            return ret
589
633
 
590
 
    @property
591
 
    def packs(self):
592
 
        # FIXME: Never invalidates.
593
 
        if not self._pack_cache:
594
 
            self._update_pack_cache()
595
 
        return self._pack_cache.values()
596
 
 
597
634
    def _update_pack_cache(self):
598
 
        for pack in self._load_packs():
599
 
            self._pack_cache[pack._basename] = pack
 
635
        pack_files = set(self._pack_names())
 
636
        new_packs = []
 
637
        for basename in pack_files:
 
638
            pack_name = basename + ".pack"
 
639
            if basename not in self._pack_cache:
 
640
                try:
 
641
                    size = self.pack_transport.stat(pack_name).st_size
 
642
                except TransportNotPossible:
 
643
                    f = self.pack_transport.get(pack_name)
 
644
                    # TODO(jelmer): Don't read entire file into memory?
 
645
                    f = BytesIO(f.read())
 
646
                    pd = PackData(pack_name, f)
 
647
                else:
 
648
                    pd = PackData(
 
649
                        pack_name, self.pack_transport.get(pack_name),
 
650
                        size=size)
 
651
                idxname = basename + ".idx"
 
652
                idx = load_pack_index_file(
 
653
                    idxname, self.pack_transport.get(idxname))
 
654
                pack = Pack.from_objects(pd, idx)
 
655
                pack._basename = basename
 
656
                self._pack_cache[basename] = pack
 
657
                new_packs.append(pack)
 
658
        # Remove disappeared pack files
 
659
        for f in set(self._pack_cache) - pack_files:
 
660
            self._pack_cache.pop(f).close()
 
661
        return new_packs
600
662
 
601
663
    def _pack_names(self):
 
664
        pack_files = []
602
665
        try:
603
 
            return self.pack_transport.list_dir(".")
 
666
            dir_contents = self.pack_transport.list_dir(".")
 
667
            for name in dir_contents:
 
668
                if name.startswith("pack-") and name.endswith(".pack"):
 
669
                    # verify that idx exists first (otherwise the pack was not yet
 
670
                    # fully written)
 
671
                    idx_name = os.path.splitext(name)[0] + ".idx"
 
672
                    if idx_name in dir_contents:
 
673
                        pack_files.append(os.path.splitext(name)[0])
604
674
        except TransportNotPossible:
605
675
            try:
606
676
                f = self.transport.get('info/packs')
607
677
            except NoSuchFile:
608
 
                # Hmm, warn about running 'git update-server-info' ?
609
 
                return iter([])
 
678
                warning('No info/packs on remote host;'
 
679
                        'run \'git update-server-info\' on remote.')
610
680
            else:
611
 
                # TODO(jelmer): Move to top-level after dulwich
612
 
                # 0.19.7 is released.
613
 
                from dulwich.object_store import read_packs_file
614
681
                with f:
615
 
                    return read_packs_file(f)
 
682
                    pack_files = [
 
683
                        os.path.splitext(name)[0]
 
684
                        for name in read_packs_file(f)]
616
685
        except NoSuchFile:
617
 
            return iter([])
 
686
            pass
 
687
        return pack_files
618
688
 
619
689
    def _remove_pack(self, pack):
620
690
        self.pack_transport.delete(os.path.basename(pack.index.path))
621
691
        self.pack_transport.delete(pack.data.filename)
622
 
 
623
 
    def _load_packs(self):
624
 
        ret = []
625
 
        for name in self._pack_names():
626
 
            if name.startswith("pack-") and name.endswith(".pack"):
627
 
                try:
628
 
                    size = self.pack_transport.stat(name).st_size
629
 
                except TransportNotPossible:
630
 
                    f = self.pack_transport.get(name)
631
 
                    pd = PackData(name, f)
632
 
                else:
633
 
                    pd = PackData(name, self.pack_transport.get(name),
634
 
                                  size=size)
635
 
                idxname = name.replace(".pack", ".idx")
636
 
                idx = load_pack_index_file(
637
 
                    idxname, self.pack_transport.get(idxname))
638
 
                pack = Pack.from_objects(pd, idx)
639
 
                pack._basename = idxname[:-4]
640
 
                ret.append(pack)
641
 
        return ret
 
692
        try:
 
693
            del self._pack_cache[os.path.basename(pack._basename)]
 
694
        except KeyError:
 
695
            pass
642
696
 
643
697
    def _iter_loose_objects(self):
644
698
        for base in self.transport.list_dir('.'):
675
729
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
676
730
        if self.transport.has(path):
677
731
            return  # Already there, no need to write again
678
 
        self.transport.put_bytes(path, obj.as_legacy_object())
 
732
        # Backwards compatibility with Dulwich < 0.20, which doesn't support
 
733
        # the compression_level parameter.
 
734
        if self.loose_compression_level not in (-1, None):
 
735
            raw_string = obj.as_legacy_object(
 
736
                compression_level=self.loose_compression_level)
 
737
        else:
 
738
            raw_string = obj.as_legacy_object()
 
739
        self.transport.put_bytes(path, raw_string)
679
740
 
680
741
    def move_in_pack(self, f):
681
742
        """Move a specific file containing a pack into the pack directory.
693
754
        p._filename = basename + ".pack"
694
755
        f.seek(0)
695
756
        self.pack_transport.put_file(basename + ".pack", f)
696
 
        idxfile = self.pack_transport.open_write_stream(basename + ".idx")
697
 
        try:
 
757
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
698
758
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
699
 
        finally:
700
 
            idxfile.close()
701
759
        idxfile = self.pack_transport.get(basename + ".idx")
702
760
        idx = load_pack_index_file(basename + ".idx", idxfile)
703
761
        final_pack = Pack.from_objects(p, idx)
704
762
        final_pack._basename = basename
705
 
        self._add_known_pack(basename, final_pack)
 
763
        self._add_cached_pack(basename, final_pack)
706
764
        return final_pack
707
765
 
708
766
    def move_in_thin_pack(self, f):
722
780
 
723
781
        pack_sha = p.index.objects_sha1()
724
782
 
725
 
        datafile = self.pack_transport.open_write_stream(
726
 
            "pack-%s.pack" % pack_sha.decode('ascii'))
727
 
        try:
 
783
        with self.pack_transport.open_write_stream(
 
784
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
728
785
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
729
 
        finally:
730
 
            datafile.close()
731
786
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
732
 
        idxfile = self.pack_transport.open_write_stream(
733
 
            "pack-%s.idx" % pack_sha.decode('ascii'))
734
 
        try:
 
787
        with self.pack_transport.open_write_stream(
 
788
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
735
789
            write_pack_index_v2(idxfile, entries, data_sum)
736
 
        finally:
737
 
            idxfile.close()
738
 
        # TODO(jelmer): Just add new pack to the cache
739
 
        self._flush_pack_cache()
740
790
 
741
791
    def add_pack(self):
742
792
        """Add a new pack to this object store.