/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2019-03-05 07:32:38 UTC
  • mto: (7290.1.21 work)
  • mto: This revision was merged to the branch mainline in revision 7311.
  • Revision ID: jelmer@jelmer.uk-20190305073238-zlqn981opwnqsmzi
Add appveyor configuration.

--- breezy/git/transportgit.py
+++ breezy/git/transportgit.py
@@ -16,6 +16,8 @@
 
 """A Git repository implementation that uses a Bazaar transport."""
 
+from __future__ import absolute_import
+
 from io import BytesIO
 
 import os
@@ -78,7 +80,6 @@
     )
 
 from ..lock import LogicalLockResult
-from ..trace import warning
 
 
 class TransportRefsContainer(RefsContainer):
@@ -210,11 +211,7 @@
         except NoSuchFile:
             return None
         with f:
-            try:
-                header = f.read(len(SYMREF))
-            except ReadError:
-                # probably a directory
-                return None
+            header = f.read(len(SYMREF))
             if header == SYMREF:
                 # Read only the first line
                 return header + next(iter(f)).rstrip(b"\r\n")
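
The removed try/except guards against the ref path actually being a directory (a ref namespace such as refs/heads/), in which case reading raises ReadError. A minimal standalone sketch of that guard, assuming a Breezy transport and the usual breezy.errors exceptions; the helper name is illustrative only:

from breezy.errors import NoSuchFile, ReadError

SYMREF = b'ref: '


def read_loose_ref_safely(transport, name):
    # Illustrative helper, not part of Breezy.
    try:
        f = transport.get(name)
    except NoSuchFile:
        return None
    with f:
        try:
            header = f.read(len(SYMREF))
        except ReadError:
            # The path exists but is a directory (e.g. refs/heads/),
            # so it cannot be a loose ref.
            return None
        if header == SYMREF:
            # Symbolic ref: keep only the first line.
            return header + next(iter(f)).rstrip(b"\r\n")
        # Otherwise the file holds the hex SHA1 itself.
        return header + f.read(40 - len(SYMREF))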
@@ -236,8 +233,11 @@
         del self._packed_refs[name]
         if name in self._peeled_refs:
             del self._peeled_refs[name]
-        with self.transport.open_write_stream("packed-refs") as f:
+        f = self.transport.open_write_stream("packed-refs")
+        try:
             write_packed_refs(f, self._packed_refs, self._peeled_refs)
+        finally:
+            f.close()
 
     def set_symbolic_ref(self, name, other):
         """Make a ref point at another ref.
@@ -424,19 +424,17 @@
                     _mod_transport.get_transport_from_path(commondir)
         else:
             self._commontransport = self._controltransport
-        config = self.get_config()
-        object_store = TransportObjectStore.from_config(
-            self._commontransport.clone(OBJECTDIR),
-            config)
+        object_store = TransportObjectStore(
+            self._commontransport.clone(OBJECTDIR))
         if refs_text is not None:
             refs_container = InfoRefsContainer(BytesIO(refs_text))
             try:
                 head = TransportRefsContainer(
-                    self._commontransport).read_loose_ref(b"HEAD")
+                    self._commontransport).read_loose_ref("HEAD")
             except KeyError:
                 pass
             else:
-                refs_container._refs[b"HEAD"] = head
+                refs_container._refs["HEAD"] = head
         else:
             refs_container = TransportRefsContainer(
                 self._commontransport, self._controltransport)
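
One detail in this hunk is the type of the ref name: dulwich ref containers key refs by byte strings, so the b"HEAD" spelling on the removed side is the form current dulwich expects. A tiny illustration with an in-memory dulwich refs container (not Breezy code):

from dulwich.refs import DictRefsContainer

refs = DictRefsContainer({})
refs[b"refs/heads/master"] = b"a" * 40    # ref names and SHA1s are byte strings
print(refs[b"refs/heads/master"])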
@@ -514,25 +512,6 @@
         backends.extend(StackedConfig.default_backends())
         return StackedConfig(backends, writable=writable)
 
-    # Here for compatibility with dulwich < 0.19.17
-    def generate_pack_data(self, have, want, progress=None, ofs_delta=None):
-        """Generate pack data objects for a set of wants/haves.
-
-        Args:
-          have: List of SHA1s of objects that should not be sent
-          want: List of SHA1s of objects that should be sent
-          ofs_delta: Whether OFS deltas can be included
-          progress: Optional progress reporting method
-        """
-        shallow = self.get_shallow()
-        if shallow:
-            return self.object_store.generate_pack_data(
-                have, want, shallow=shallow,
-                progress=progress, ofs_delta=ofs_delta)
-        else:
-            return self.object_store.generate_pack_data(
-                have, want, progress=progress, ofs_delta=ofs_delta)
-
     def __repr__(self):
         return "<%s for %r>" % (self.__class__.__name__, self.transport)
 
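The removed generate_pack_data shim simply forwards to the object store, adding the shallow set when there is one; dulwich 0.19.17 and later ship an equivalent method on Repo itself. A rough usage sketch against a plain dulwich repository (the path is a placeholder):

from dulwich.repo import Repo

repo = Repo("/tmp/example-repo")          # placeholder path
have = []                                 # SHA1s the receiver already has
want = [repo.refs[b"HEAD"]]               # SHA1s we want to send
pack_data = repo.generate_pack_data(have, want, ofs_delta=True)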
@@ -565,38 +544,16 @@
 class TransportObjectStore(PackBasedObjectStore):
     """Git-style object store that exists on disk."""
 
-    def __init__(self, transport,
-                 loose_compression_level=-1, pack_compression_level=-1):
+    def __init__(self, transport):
         """Open an object store.
 
         :param transport: Transport to open data from
         """
         super(TransportObjectStore, self).__init__()
-        self.pack_compression_level = pack_compression_level
-        self.loose_compression_level = loose_compression_level
         self.transport = transport
         self.pack_transport = self.transport.clone(PACKDIR)
         self._alternates = None
 
-    @classmethod
-    def from_config(cls, path, config):
-        try:
-            default_compression_level = int(config.get(
-                (b'core', ), b'compression').decode())
-        except KeyError:
-            default_compression_level = -1
-        try:
-            loose_compression_level = int(config.get(
-                (b'core', ), b'looseCompression').decode())
-        except KeyError:
-            loose_compression_level = default_compression_level
-        try:
-            pack_compression_level = int(config.get(
-                (b'core', ), 'packCompression').decode())
-        except KeyError:
-            pack_compression_level = default_compression_level
-        return cls(path, loose_compression_level, pack_compression_level)
-
     def __eq__(self, other):
         if not isinstance(other, TransportObjectStore):
             return False
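
The removed from_config classmethod derives its compression levels from the git config: core.looseCompression and core.packCompression fall back to core.compression, and finally to -1 (zlib's default). The same lookup, sketched against an in-memory dulwich ConfigFile (the helper name is made up):

from dulwich.config import ConfigFile


def compression_levels(config):
    # Illustrative helper, not part of Breezy or dulwich.
    def lookup(name, fallback):
        try:
            return int(config.get((b'core',), name).decode())
        except KeyError:
            return fallback

    default = lookup(b'compression', -1)
    return (lookup(b'looseCompression', default),
            lookup(b'packCompression', default))


config = ConfigFile()
config.set((b'core',), b'compression', b'6')
print(compression_levels(config))   # (6, 6)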
@@ -632,7 +589,16 @@
             return ret
 
     def _update_pack_cache(self):
-        pack_files = set(self._pack_names())
+        pack_files = set()
+        pack_dir_contents = self._pack_names()
+        for name in pack_dir_contents:
+            if name.startswith("pack-") and name.endswith(".pack"):
+                # verify that idx exists first (otherwise the pack was not yet
+                # fully written)
+                idx_name = os.path.splitext(name)[0] + ".idx"
+                if idx_name in pack_dir_contents:
+                    pack_files.add(os.path.splitext(name)[0])
+
         new_packs = []
         for basename in pack_files:
             pack_name = basename + ".pack"
@@ -641,8 +607,6 @@
                     size = self.pack_transport.stat(pack_name).st_size
                 except TransportNotPossible:
                     f = self.pack_transport.get(pack_name)
-                    # TODO(jelmer): Don't read entire file into memory?
-                    f = BytesIO(f.read())
                     pd = PackData(pack_name, f)
                 else:
                     pd = PackData(
@@ -661,30 +625,19 @@
         return new_packs
 
     def _pack_names(self):
-        pack_files = []
         try:
-            dir_contents = self.pack_transport.list_dir(".")
-            for name in dir_contents:
-                if name.startswith("pack-") and name.endswith(".pack"):
-                    # verify that idx exists first (otherwise the pack was not yet
-                    # fully written)
-                    idx_name = os.path.splitext(name)[0] + ".idx"
-                    if idx_name in dir_contents:
-                        pack_files.append(os.path.splitext(name)[0])
+            return self.pack_transport.list_dir(".")
         except TransportNotPossible:
             try:
                 f = self.transport.get('info/packs')
             except NoSuchFile:
-                warning('No info/packs on remote host;'
-                        'run \'git update-server-info\' on remote.')
+                # Hmm, warn about running 'git update-server-info' ?
+                return iter([])
             else:
                 with f:
-                    pack_files = [
-                        os.path.splitext(name)[0]
-                        for name in read_packs_file(f)]
+                    return read_packs_file(f)
         except NoSuchFile:
-            pass
-        return pack_files
+            return iter([])
 
     def _remove_pack(self, pack):
         self.pack_transport.delete(os.path.basename(pack.index.path))
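
In the removed lines, _pack_names only reports a pack once its .idx companion exists (a pack without an index may still be in the middle of being written), and it falls back to the dumb-HTTP info/packs file when the transport cannot list directories. The filtering step in isolation, as a small self-contained sketch:

import os

def complete_pack_basenames(dir_contents):
    """Return pack basenames whose .idx companion is already present.

    `dir_contents` is whatever transport.list_dir('.') returned; a pack
    file without its index is assumed to be only partially written.
    """
    basenames = []
    for name in dir_contents:
        if name.startswith("pack-") and name.endswith(".pack"):
            idx_name = os.path.splitext(name)[0] + ".idx"
            if idx_name in dir_contents:
                basenames.append(os.path.splitext(name)[0])
    return basenames

print(complete_pack_basenames(
    ["pack-1234.pack", "pack-1234.idx", "pack-abcd.pack"]))
# ['pack-1234']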
@@ -729,14 +682,7 @@
         path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
         if self.transport.has(path):
             return  # Already there, no need to write again
-        # Backwards compatibility with Dulwich < 0.20, which doesn't support
-        # the compression_level parameter.
-        if self.loose_compression_level not in (-1, None):
-            raw_string = obj.as_legacy_object(
-                compression_level=self.loose_compression_level)
-        else:
-            raw_string = obj.as_legacy_object()
-        self.transport.put_bytes(path, raw_string)
+        self.transport.put_bytes(path, obj.as_legacy_object())
 
     def move_in_pack(self, f):
         """Move a specific file containing a pack into the pack directory.
@@ -754,8 +700,11 @@
         p._filename = basename + ".pack"
         f.seek(0)
         self.pack_transport.put_file(basename + ".pack", f)
-        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
+        idxfile = self.pack_transport.open_write_stream(basename + ".idx")
+        try:
             write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
+        finally:
+            idxfile.close()
         idxfile = self.pack_transport.get(basename + ".idx")
         idx = load_pack_index_file(basename + ".idx", idxfile)
         final_pack = Pack.from_objects(p, idx)
@@ -780,13 +729,19 @@
 
         pack_sha = p.index.objects_sha1()
 
-        with self.pack_transport.open_write_stream(
-                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
+        datafile = self.pack_transport.open_write_stream(
+            "pack-%s.pack" % pack_sha.decode('ascii'))
+        try:
             entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
+        finally:
+            datafile.close()
         entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
-        with self.pack_transport.open_write_stream(
-                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
+        idxfile = self.pack_transport.open_write_stream(
+            "pack-%s.idx" % pack_sha.decode('ascii'))
+        try:
             write_pack_index_v2(idxfile, entries, data_sum)
+        finally:
+            idxfile.close()
 
     def add_pack(self):
         """Add a new pack to this object store.