/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/plugins/git/transportgit.py

Merge test-run support.

Show diffs side-by-side

added added

removed removed

Lines of Context:
22
22
 
23
23
import os
24
24
import sys
 
25
import urllib
25
26
 
26
27
from dulwich.errors import (
 
28
    NotGitRepository,
27
29
    NoIndexPresent,
28
30
    )
29
31
from dulwich.file import (
36
38
from dulwich.object_store import (
37
39
    PackBasedObjectStore,
38
40
    PACKDIR,
39
 
    read_packs_file,
40
41
    )
41
42
from dulwich.pack import (
42
43
    MemoryPackIndex,
56
57
    CONTROLDIR,
57
58
    INDEX_FILENAME,
58
59
    OBJECTDIR,
 
60
    REFSDIR,
59
61
    SYMREF,
60
62
    check_ref_format,
 
63
    read_gitfile,
61
64
    read_packed_refs_with_peeled,
62
65
    read_packed_refs,
63
66
    write_packed_refs,
64
67
    )
65
68
 
66
 
from .. import (
67
 
    osutils,
 
69
from ... import (
68
70
    transport as _mod_transport,
69
 
    urlutils,
70
71
    )
71
 
from ..errors import (
 
72
from ...errors import (
72
73
    AlreadyControlDirError,
73
74
    FileExists,
74
75
    LockBroken,
 
76
    LockError,
75
77
    LockContention,
76
78
    NotLocalUrl,
77
79
    NoSuchFile,
79
81
    TransportNotPossible,
80
82
    )
81
83
 
82
 
from ..lock import LogicalLockResult
83
 
from ..trace import warning
 
84
from ...lock import LogicalLockResult
84
85
 
85
86
 
86
87
class TransportRefsContainer(RefsContainer):
98
99
        return "%s(%r)" % (self.__class__.__name__, self.transport)
99
100
 
100
101
    def _ensure_dir_exists(self, path):
101
 
        for n in range(path.count("/")):
102
 
            dirname = "/".join(path.split("/")[:n + 1])
 
102
        for n in range(path.count(b"/")):
 
103
            dirname = b"/".join(path.split(b"/")[:n+1])
103
104
            try:
104
105
                self.transport.mkdir(dirname)
105
106
            except FileExists:
126
127
        except NoSuchFile:
127
128
            pass
128
129
        else:
129
 
            keys.add(b"HEAD")
 
130
            keys.add("HEAD")
130
131
        try:
131
 
            iter_files = list(self.transport.clone(
132
 
                "refs").iter_files_recursive())
 
132
            iter_files = list(self.transport.clone("refs").iter_files_recursive())
133
133
            for filename in iter_files:
134
 
                unquoted_filename = urlutils.unquote_to_bytes(filename)
135
 
                refname = osutils.pathjoin(b"refs", unquoted_filename)
 
134
                refname = "refs/%s" % urllib.unquote(filename)
136
135
                if check_ref_format(refname):
137
136
                    keys.add(refname)
138
137
        except (TransportNotPossible, NoSuchFile):
160
159
                return {}
161
160
            try:
162
161
                first_line = next(iter(f)).rstrip()
163
 
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
 
162
                if (first_line.startswith("# pack-refs") and " peeled" in
164
163
                        first_line):
165
164
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
166
165
                        self._packed_refs[name] = sha
178
177
        """Return the cached peeled value of a ref, if available.
179
178
 
180
179
        :param name: Name of the ref to peel
181
 
        :return: The peeled value of the ref. If the ref is known not to point to
182
 
            a tag, this will be the SHA the ref refers to. If the ref may point
183
 
            to a tag, but no cached information is available, None is returned.
 
180
        :return: The peeled value of the ref. If the ref is known not to point to a
 
181
            tag, this will be the SHA the ref refers to. If the ref may point to
 
182
            a tag, but no cached information is available, None is returned.
184
183
        """
185
184
        self.get_packed_refs()
186
185
        if self._peeled_refs is None or name not in self._packed_refs:
208
207
        else:
209
208
            transport = self.transport
210
209
        try:
211
 
            f = transport.get(urlutils.quote_from_bytes(name))
 
210
            f = transport.get(name)
212
211
        except NoSuchFile:
213
212
            return None
214
 
        with f:
215
 
            try:
216
 
                header = f.read(len(SYMREF))
217
 
            except ReadError:
218
 
                # probably a directory
219
 
                return None
 
213
        try:
 
214
            header = f.read(len(SYMREF))
220
215
            if header == SYMREF:
221
216
                # Read only the first line
222
217
                return header + next(iter(f)).rstrip(b"\r\n")
223
218
            else:
224
219
                # Read only the first 40 bytes
225
 
                return header + f.read(40 - len(SYMREF))
 
220
                return header + f.read(40-len(SYMREF))
 
221
        finally:
 
222
            f.close()
226
223
 
227
224
    def _remove_packed_ref(self, name):
228
225
        if self._packed_refs is None:
238
235
        del self._packed_refs[name]
239
236
        if name in self._peeled_refs:
240
237
            del self._peeled_refs[name]
241
 
        with self.transport.open_write_stream("packed-refs") as f:
 
238
        f = self.transport.open_write_stream("packed-refs")
 
239
        try:
242
240
            write_packed_refs(f, self._packed_refs, self._peeled_refs)
 
241
        finally:
 
242
            f.close()
243
243
 
244
244
    def set_symbolic_ref(self, name, other):
245
245
        """Make a ref point at another ref.
251
251
        self._check_refname(other)
252
252
        if name != b'HEAD':
253
253
            transport = self.transport
254
 
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
 
254
            self._ensure_dir_exists(name)
255
255
        else:
256
256
            transport = self.worktree_transport
257
 
        transport.put_bytes(urlutils.quote_from_bytes(
258
 
            name), SYMREF + other + b'\n')
 
257
        transport.put_bytes(name, SYMREF + other + b'\n')
259
258
 
260
259
    def set_if_equals(self, name, old_ref, new_ref):
261
260
        """Set a refname to new_ref only if it currently equals old_ref.
278
277
            transport = self.worktree_transport
279
278
        else:
280
279
            transport = self.transport
281
 
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
282
 
        transport.put_bytes(urlutils.quote_from_bytes(
283
 
            realname), new_ref + b"\n")
 
280
            self._ensure_dir_exists(realname)
 
281
        transport.put_bytes(realname, new_ref+"\n")
284
282
        return True
285
283
 
286
284
    def add_if_new(self, name, ref):
305
303
            transport = self.worktree_transport
306
304
        else:
307
305
            transport = self.transport
308
 
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
309
 
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
 
306
            self._ensure_dir_exists(realname)
 
307
        transport.put_bytes(realname, ref+"\n")
310
308
        return True
311
309
 
312
310
    def remove_if_equals(self, name, old_ref):
316
314
        perform an atomic compare-and-delete operation.
317
315
 
318
316
        :param name: The refname to delete.
319
 
        :param old_ref: The old sha the refname must refer to, or None to
320
 
            delete unconditionally.
 
317
        :param old_ref: The old sha the refname must refer to, or None to delete
 
318
            unconditionally.
321
319
        :return: True if the delete was successful, False otherwise.
322
320
        """
323
321
        self._check_refname(name)
327
325
        else:
328
326
            transport = self.transport
329
327
        try:
330
 
            transport.delete(urlutils.quote_from_bytes(name))
 
328
            transport.delete(name)
331
329
        except NoSuchFile:
332
330
            pass
333
331
        self._remove_packed_ref(name)
346
344
            transport = self.transport
347
345
        lockname = name + b".lock"
348
346
        try:
349
 
            transport.delete(urlutils.quote_from_bytes(lockname))
 
347
            self.transport.delete(lockname)
350
348
        except NoSuchFile:
351
349
            pass
352
350
 
355
353
            transport = self.worktree_transport
356
354
        else:
357
355
            transport = self.transport
358
 
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
359
 
        lockname = urlutils.quote_from_bytes(name + b".lock")
 
356
        self._ensure_dir_exists(name)
 
357
        lockname = name + b".lock"
360
358
        try:
361
 
            local_path = transport.local_abspath(
362
 
                urlutils.quote_from_bytes(name))
 
359
            local_path = self.transport.local_abspath(name)
363
360
        except NotLocalUrl:
364
361
            # This is racy, but what can we do?
365
 
            if transport.has(lockname):
 
362
            if self.transport.has(lockname):
366
363
                raise LockContention(name)
367
 
            transport.put_bytes(lockname, b'Locked by brz-git')
368
 
            return LogicalLockResult(lambda: transport.delete(lockname))
 
364
            lock_result = self.transport.put_bytes(lockname, b'Locked by brz-git')
 
365
            return LogicalLockResult(lambda: self.transport.delete(lockname))
369
366
        else:
370
367
            try:
371
368
                gf = GitFile(local_path, 'wb')
374
371
            else:
375
372
                def unlock():
376
373
                    try:
377
 
                        transport.delete(lockname)
 
374
                        self.transport.delete(lockname)
378
375
                    except NoSuchFile:
379
376
                        raise LockBroken(lockname)
380
 
                    # GitFile.abort doesn't care if the lock has already
381
 
                    # disappeared
 
377
                    # GitFile.abort doesn't care if the lock has already disappeared
382
378
                    gf.abort()
383
379
                return LogicalLockResult(unlock)
384
380
 
385
381
 
386
 
# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
387
 
# rather than bytes..
388
 
def read_gitfile(f):
389
 
    """Read a ``.git`` file.
390
 
 
391
 
    The first line of the file should start with "gitdir: "
392
 
 
393
 
    :param f: File-like object to read from
394
 
    :return: A path
395
 
    """
396
 
    cs = f.read()
397
 
    if not cs.startswith(b"gitdir: "):
398
 
        raise ValueError("Expected file to start with 'gitdir: '")
399
 
    return cs[len(b"gitdir: "):].rstrip(b"\n")
400
 
 
401
 
 
402
382
class TransportRepo(BaseRepo):
403
383
 
404
384
    def __init__(self, transport, bare, refs_text=None):
413
393
            else:
414
394
                self._controltransport = self.transport.clone('.git')
415
395
        else:
416
 
            self._controltransport = self.transport.clone(
417
 
                urlutils.quote_from_bytes(path))
 
396
            self._controltransport = self.transport.clone(path)
418
397
        commondir = self.get_named_file(COMMONDIR)
419
398
        if commondir is not None:
420
399
            with commondir:
431
410
        if refs_text is not None:
432
411
            refs_container = InfoRefsContainer(BytesIO(refs_text))
433
412
            try:
434
 
                head = TransportRefsContainer(
435
 
                    self._commontransport).read_loose_ref(b"HEAD")
 
413
                head = TransportRefsContainer(self._commontransport).read_loose_ref("HEAD")
436
414
            except KeyError:
437
415
                pass
438
416
            else:
439
 
                refs_container._refs[b"HEAD"] = head
 
417
                refs_container._refs["HEAD"] = head
440
418
        else:
441
419
            refs_container = TransportRefsContainer(
442
 
                self._commontransport, self._controltransport)
 
420
                    self._commontransport, self._controltransport)
443
421
        super(TransportRepo, self).__init__(object_store,
444
 
                                            refs_container)
 
422
                refs_container)
445
423
 
446
424
    def controldir(self):
447
425
        return self._controltransport.local_abspath('.')
497
475
    def get_config(self):
498
476
        from dulwich.config import ConfigFile
499
477
        try:
500
 
            with self._controltransport.get('config') as f:
501
 
                return ConfigFile.from_file(f)
 
478
            return ConfigFile.from_file(self._controltransport.get('config'))
502
479
        except NoSuchFile:
503
480
            return ConfigFile()
504
481
 
581
558
        except NoSuchFile:
582
559
            return []
583
560
        ret = []
584
 
        with f:
 
561
        try:
585
562
            for l in f.read().splitlines():
586
 
                if l[0] == b"#":
 
563
                if l[0] == "#":
587
564
                    continue
588
565
                if os.path.isabs(l):
589
566
                    continue
590
567
                ret.append(l)
591
568
            return ret
 
569
        finally:
 
570
            f.close()
 
571
 
 
572
    @property
 
573
    def packs(self):
 
574
        # FIXME: Never invalidates.
 
575
        if not self._pack_cache:
 
576
            self._update_pack_cache()
 
577
        return self._pack_cache.values()
592
578
 
593
579
    def _update_pack_cache(self):
594
 
        pack_files = set(self._pack_names())
595
 
        new_packs = []
596
 
        for basename in pack_files:
597
 
            pack_name = basename + ".pack"
598
 
            if basename not in self._pack_cache:
599
 
                try:
600
 
                    size = self.pack_transport.stat(pack_name).st_size
601
 
                except TransportNotPossible:
602
 
                    f = self.pack_transport.get(pack_name)
603
 
                    # TODO(jelmer): Don't read entire file into memory?
604
 
                    f = BytesIO(f.read())
605
 
                    pd = PackData(pack_name, f)
606
 
                else:
607
 
                    pd = PackData(
608
 
                        pack_name, self.pack_transport.get(pack_name),
609
 
                        size=size)
610
 
                idxname = basename + ".idx"
611
 
                idx = load_pack_index_file(
612
 
                    idxname, self.pack_transport.get(idxname))
613
 
                pack = Pack.from_objects(pd, idx)
614
 
                pack._basename = basename
615
 
                self._pack_cache[basename] = pack
616
 
                new_packs.append(pack)
617
 
        # Remove disappeared pack files
618
 
        for f in set(self._pack_cache) - pack_files:
619
 
            self._pack_cache.pop(f).close()
620
 
        return new_packs
 
580
        for pack in self._load_packs():
 
581
            self._pack_cache[pack._basename] = pack
621
582
 
622
583
    def _pack_names(self):
623
 
        pack_files = []
624
584
        try:
625
 
            dir_contents = self.pack_transport.list_dir(".")
626
 
            for name in dir_contents:
627
 
                if name.startswith("pack-") and name.endswith(".pack"):
628
 
                    # verify that idx exists first (otherwise the pack was not yet
629
 
                    # fully written)
630
 
                    idx_name = os.path.splitext(name)[0] + ".idx"
631
 
                    if idx_name in dir_contents:
632
 
                        pack_files.append(os.path.splitext(name)[0])
633
 
        except TransportNotPossible:
634
 
            try:
635
 
                f = self.transport.get('info/packs')
636
 
            except NoSuchFile:
637
 
                warning('No info/packs on remote host;'
638
 
                        'run \'git update-server-info\' on remote.')
639
 
            else:
640
 
                with f:
641
 
                    pack_files = [
642
 
                        os.path.splitext(name)[0]
643
 
                        for name in read_packs_file(f)]
 
585
            f = self.transport.get('info/packs')
644
586
        except NoSuchFile:
645
 
            pass
646
 
        return pack_files
 
587
            return self.pack_transport.list_dir(".")
 
588
        else:
 
589
            ret = []
 
590
            for line in f.read().splitlines():
 
591
                if not line:
 
592
                    continue
 
593
                (kind, name) = line.split(" ", 1)
 
594
                if kind != "P":
 
595
                    continue
 
596
                ret.append(name)
 
597
            return ret
647
598
 
648
599
    def _remove_pack(self, pack):
649
600
        self.pack_transport.delete(os.path.basename(pack.index.path))
650
601
        self.pack_transport.delete(pack.data.filename)
651
 
        try:
652
 
            del self._pack_cache[os.path.basename(pack._basename)]
653
 
        except KeyError:
654
 
            pass
 
602
 
 
603
    def _load_packs(self):
 
604
        ret = []
 
605
        for name in self._pack_names():
 
606
            if name.startswith("pack-") and name.endswith(".pack"):
 
607
                try:
 
608
                    size = self.pack_transport.stat(name).st_size
 
609
                except TransportNotPossible:
 
610
                    f = self.pack_transport.get(name)
 
611
                    pd = PackData(name, f, size=len(contents))
 
612
                else:
 
613
                    pd = PackData(name, self.pack_transport.get(name),
 
614
                            size=size)
 
615
                idxname = name.replace(".pack", ".idx")
 
616
                idx = load_pack_index_file(idxname, self.pack_transport.get(idxname))
 
617
                pack = Pack.from_objects(pd, idx)
 
618
                pack._basename = idxname[:-4]
 
619
                ret.append(pack)
 
620
        return ret
655
621
 
656
622
    def _iter_loose_objects(self):
657
623
        for base in self.transport.list_dir('.'):
658
624
            if len(base) != 2:
659
625
                continue
660
626
            for rest in self.transport.list_dir(base):
661
 
                yield (base + rest).encode(sys.getfilesystemencoding())
 
627
                yield base+rest
662
628
 
663
629
    def _split_loose_object(self, sha):
664
630
        return (sha[:2], sha[2:])
665
631
 
666
632
    def _remove_loose_object(self, sha):
667
 
        path = osutils.joinpath(self._split_loose_object(sha))
668
 
        self.transport.delete(urlutils.quote_from_bytes(path))
 
633
        path = '%s/%s' % self._split_loose_object(sha)
 
634
        self.transport.delete(path)
669
635
 
670
636
    def _get_loose_object(self, sha):
671
 
        path = osutils.joinpath(self._split_loose_object(sha))
 
637
        path = '%s/%s' % self._split_loose_object(sha)
672
638
        try:
673
 
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
674
 
                return ShaFile.from_file(f)
 
639
            return ShaFile.from_file(self.transport.get(path))
675
640
        except NoSuchFile:
676
641
            return None
677
642
 
682
647
        """
683
648
        (dir, file) = self._split_loose_object(obj.id)
684
649
        try:
685
 
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
 
650
            self.transport.mkdir(dir)
686
651
        except FileExists:
687
652
            pass
688
 
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
 
653
        path = "%s/%s" % (dir, file)
689
654
        if self.transport.has(path):
690
 
            return  # Already there, no need to write again
 
655
            return # Already there, no need to write again
691
656
        self.transport.put_bytes(path, obj.as_legacy_object())
692
657
 
693
658
    def move_in_pack(self, f):
701
666
        f.seek(0)
702
667
        p = PackData("", f, len(f.getvalue()))
703
668
        entries = p.sorted_entries()
704
 
        basename = "pack-%s" % iter_sha1(entry[0]
705
 
                                         for entry in entries).decode('ascii')
 
669
        basename = "pack-%s" % iter_sha1(entry[0] for entry in entries)
706
670
        p._filename = basename + ".pack"
707
671
        f.seek(0)
708
672
        self.pack_transport.put_file(basename + ".pack", f)
709
 
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
 
673
        idxfile = self.pack_transport.open_write_stream(basename + ".idx")
 
674
        try:
710
675
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
 
676
        finally:
 
677
            idxfile.close()
711
678
        idxfile = self.pack_transport.get(basename + ".idx")
712
 
        idx = load_pack_index_file(basename + ".idx", idxfile)
 
679
        idx = load_pack_index_file(basename+".idx", idxfile)
713
680
        final_pack = Pack.from_objects(p, idx)
714
681
        final_pack._basename = basename
715
 
        self._add_cached_pack(basename, final_pack)
 
682
        self._add_known_pack(basename, final_pack)
716
683
        return final_pack
717
684
 
718
685
    def move_in_thin_pack(self, f):
727
694
        p = Pack('', resolve_ext_ref=self.get_raw)
728
695
        p._data = PackData.from_file(f, len(f.getvalue()))
729
696
        p._data.pack = p
730
 
        p._idx_load = lambda: MemoryPackIndex(
731
 
            p.data.sorted_entries(), p.data.get_stored_checksum())
 
697
        p._idx_load = lambda: MemoryPackIndex(p.data.sorted_entries(), p.data.get_stored_checksum())
732
698
 
733
699
        pack_sha = p.index.objects_sha1()
734
700
 
735
 
        with self.pack_transport.open_write_stream(
736
 
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
 
701
        datafile = self.pack_transport.open_write_stream(
 
702
                "pack-%s.pack" % pack_sha)
 
703
        try:
737
704
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
 
705
        finally:
 
706
            datafile.close()
738
707
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
739
 
        with self.pack_transport.open_write_stream(
740
 
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
 
708
        idxfile = self.pack_transport.open_write_stream(
 
709
            "pack-%s.idx" % pack_sha)
 
710
        try:
741
711
            write_pack_index_v2(idxfile, entries, data_sum)
 
712
        finally:
 
713
            idxfile.close()
 
714
        # TODO(jelmer): Just add new pack to the cache
 
715
        self._flush_pack_cache()
742
716
 
743
717
    def add_pack(self):
744
718
        """Add a new pack to this object store.
747
721
            call when the pack is finished.
748
722
        """
749
723
        f = BytesIO()
750
 
 
751
724
        def commit():
752
725
            if len(f.getvalue()) > 0:
753
726
                return self.move_in_pack(f)
754
727
            else:
755
728
                return None
756
 
 
757
729
        def abort():
758
730
            return None
759
731
        return f, commit, abort