/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/plugins/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2018-05-19 13:16:11 UTC
  • mto: (6968.4.3 git-archive)
  • mto: This revision was merged to the branch mainline in revision 6972.
  • Revision ID: jelmer@jelmer.uk-20180519131611-l9h9ud41j7qg1m03
Move tar/zip to breezy.archive.

--- a/breezy/plugins/git/transportgit.py
+++ b/breezy/plugins/git/transportgit.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2010 Jelmer Vernooij <jelmer@samba.org>
+# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,16 +12,26 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 """A Git repository implementation that uses a Bazaar transport."""

-from cStringIO import StringIO
+from __future__ import absolute_import
+
+from io import BytesIO
+
+import os
+import sys
+import urllib

 from dulwich.errors import (
     NotGitRepository,
     NoIndexPresent,
     )
+from dulwich.file import (
+    GitFile,
+    FileLocked,
+    )
 from dulwich.objects import (
     ShaFile,
     )
@@ -33,37 +43,55 @@
     MemoryPackIndex,
     PackData,
     Pack,
-    ThinPackData,
     iter_sha1,
     load_pack_index_file,
-    write_pack_data,
+    write_pack_objects,
     write_pack_index_v2,
     )
 from dulwich.repo import (
     BaseRepo,
+    InfoRefsContainer,
     RefsContainer,
+    BASE_DIRECTORIES,
+    COMMONDIR,
+    CONTROLDIR,
     INDEX_FILENAME,
     OBJECTDIR,
     REFSDIR,
     SYMREF,
     check_ref_format,
+    read_gitfile,
     read_packed_refs_with_peeled,
     read_packed_refs,
     write_packed_refs,
     )

-from bzrlib.errors import (
+from ... import (
+    transport as _mod_transport,
+    )
+from ...errors import (
+    AlreadyControlDirError,
     FileExists,
+    LockBroken,
+    LockError,
+    LockContention,
+    NotLocalUrl,
     NoSuchFile,
+    ReadError,
     TransportNotPossible,
     )

+from ...lock import LogicalLockResult
+

 class TransportRefsContainer(RefsContainer):
     """Refs container that reads refs from a transport."""

-    def __init__(self, transport):
+    def __init__(self, transport, worktree_transport=None):
         self.transport = transport
+        if worktree_transport is None:
+            worktree_transport = transport
+        self.worktree_transport = worktree_transport
         self._packed_refs = None
         self._peeled_refs = None

@@ -79,26 +107,31 @@
                 pass

     def subkeys(self, base):
+        """Refs present in this container under a base.
+
+        :param base: The base to return refs under.
+        :return: A set of valid refs in this container under the base; the base
+            prefix is stripped from the ref names returned.
+        """
         keys = set()
-        try:
-            iter_files = self.transport.clone(base).iter_files_recursive()
-            keys.update(("%s/%s" % (base, refname)).strip("/") for
-                    refname in iter_files if check_ref_format("%s/%s" % (base, refname)))
-        except (TransportNotPossible, NoSuchFile):
-            pass
-        for key in self.get_packed_refs():
-            if key.startswith(base):
-                keys.add(key[len(base):].strip("/"))
+        base_len = len(base) + 1
+        for refname in self.allkeys():
+            if refname.startswith(base):
+                keys.add(refname[base_len:])
         return keys

     def allkeys(self):
         keys = set()
-        if self.transport.has("HEAD"):
+        try:
+            self.worktree_transport.get_bytes("HEAD")
+        except NoSuchFile:
+            pass
+        else:
             keys.add("HEAD")
         try:
             iter_files = list(self.transport.clone("refs").iter_files_recursive())
             for filename in iter_files:
-                refname = "refs/%s" % filename
+                refname = "refs/%s" % urllib.unquote(filename)
                 if check_ref_format(refname):
                     keys.add(refname)
         except (TransportNotPossible, NoSuchFile):
@@ -125,7 +158,7 @@
             except NoSuchFile:
                 return {}
             try:
-                first_line = iter(f).next().rstrip()
+                first_line = next(iter(f)).rstrip()
                 if (first_line.startswith("# pack-refs") and " peeled" in
                         first_line):
                     for sha, name, peeled in read_packed_refs_with_peeled(f):
@@ -169,15 +202,20 @@
             exist.
         :raises IOError: if any other error occurs
         """
+        if name == b'HEAD':
+            transport = self.worktree_transport
+        else:
+            transport = self.transport
         try:
-            f = self.transport.get(name)
+            f = transport.get(name)
         except NoSuchFile:
             return None
+        f = BytesIO(f.read())
         try:
             header = f.read(len(SYMREF))
             if header == SYMREF:
                 # Read only the first line
-                return header + iter(f).next().rstrip("\r\n")
+                return header + next(iter(f)).rstrip(b"\r\n")
             else:
                 # Read only the first 40 bytes
                 return header + f.read(40-len(SYMREF))
@@ -212,8 +250,12 @@
         """
         self._check_refname(name)
         self._check_refname(other)
-        self._ensure_dir_exists(name)
-        self.transport.put_bytes(name, SYMREF + other + '\n')
+        if name != b'HEAD':
+            transport = self.transport
+            self._ensure_dir_exists(name)
+        else:
+            transport = self.worktree_transport
+        transport.put_bytes(name, SYMREF + other + b'\n')

     def set_if_equals(self, name, old_ref, new_ref):
         """Set a refname to new_ref only if it currently equals old_ref.
@@ -228,11 +270,16 @@
         :return: True if the set was successful, False otherwise.
         """
         try:
-            realname, _ = self._follow(name)
-        except KeyError:
+            realnames, _ = self.follow(name)
+            realname = realnames[-1]
+        except (KeyError, IndexError):
             realname = name
-        self._ensure_dir_exists(realname)
-        self.transport.put_bytes(realname, new_ref+"\n")
+        if realname == b'HEAD':
+            transport = self.worktree_transport
+        else:
+            transport = self.transport
+            self._ensure_dir_exists(realname)
+        transport.put_bytes(realname, new_ref+"\n")
         return True

     def add_if_new(self, name, ref):
@@ -246,14 +293,19 @@
         :return: True if the add was successful, False otherwise.
         """
         try:
-            realname, contents = self._follow(name)
+            realnames, contents = self.follow(name)
             if contents is not None:
                 return False
-        except KeyError:
+            realname = realnames[-1]
+        except (KeyError, IndexError):
             realname = name
         self._check_refname(realname)
-        self._ensure_dir_exists(realname)
-        self.transport.put_bytes(realname, ref+"\n")
+        if realname == b'HEAD':
+            transport = self.worktree_transport
+        else:
+            transport = self.transport
+            self._ensure_dir_exists(realname)
+        transport.put_bytes(realname, ref+"\n")
         return True

     def remove_if_equals(self, name, old_ref):
@@ -269,33 +321,122 @@
         """
         self._check_refname(name)
         # may only be packed
+        if name == b'HEAD':
+            transport = self.worktree_transport
+        else:
+            transport = self.transport
         try:
-            self.transport.delete(name)
+            transport.delete(name)
         except NoSuchFile:
             pass
         self._remove_packed_ref(name)
         return True

+    def get(self, name, default=None):
+        try:
+            return self[name]
+        except KeyError:
+            return default
+
+    def unlock_ref(self, name):
+        if name == b"HEAD":
+            transport = self.worktree_transport
+        else:
+            transport = self.transport
+        lockname = name + ".lock"
+        try:
+            self.transport.delete(lockname)
+        except NoSuchFile:
+            pass
+
+    def lock_ref(self, name):
+        if name == b"HEAD":
+            transport = self.worktree_transport
+        else:
+            transport = self.transport
+        self._ensure_dir_exists(name)
+        lockname = name + ".lock"
+        try:
+            local_path = self.transport.local_abspath(name)
+        except NotLocalUrl:
+            # This is racy, but what can we do?
+            if self.transport.has(lockname):
+                raise LockContention(name)
+            lock_result = self.transport.put_bytes(lockname, b'Locked by brz-git')
+            return LogicalLockResult(lambda: self.transport.delete(lockname))
+        else:
+            try:
+                gf = GitFile(local_path, 'wb')
+            except FileLocked as e:
+                raise LockContention(name, e)
+            else:
+                def unlock():
+                    try:
+                        self.transport.delete(lockname)
+                    except NoSuchFile:
+                        raise LockBroken(lockname)
+                    # GitFile.abort doesn't care if the lock has already disappeared
+                    gf.abort()
+                return LogicalLockResult(unlock)
+

 class TransportRepo(BaseRepo):

-    def __init__(self, transport):
+    def __init__(self, transport, bare, refs_text=None):
         self.transport = transport
+        self.bare = bare
         try:
-            if self.transport.has(".git/%s" % OBJECTDIR):
-                self.bare = False
+            with transport.get(CONTROLDIR) as f:
+                path = read_gitfile(f)
+        except (ReadError, NoSuchFile):
+            if self.bare:
+                self._controltransport = self.transport
+            else:
                 self._controltransport = self.transport.clone('.git')
-            elif self.transport.has_any(["info/refs", OBJECTDIR, REFSDIR]):
-                self.bare = True
-                self._controltransport = self.transport
-            else:
-                raise NotGitRepository(self.transport)
-        except NoSuchFile:
-            raise NotGitRepository(self.transport)
+        else:
+            self._controltransport = self.transport.clone(path)
+        commondir = self.get_named_file(COMMONDIR)
+        if commondir is not None:
+            with commondir:
+                commondir = os.path.join(
+                    self.controldir(),
+                    commondir.read().rstrip(b"\r\n").decode(
+                        sys.getfilesystemencoding()))
+                self._commontransport = \
+                    _mod_transport.get_transport_from_path(commondir)
+        else:
+            self._commontransport = self._controltransport
         object_store = TransportObjectStore(
-            self._controltransport.clone(OBJECTDIR))
-        super(TransportRepo, self).__init__(object_store,
-                TransportRefsContainer(self._controltransport))
+            self._commontransport.clone(OBJECTDIR))
+        if refs_text is not None:
+            refs_container = InfoRefsContainer(BytesIO(refs_text))
+            try:
+                head = TransportRefsContainer(self._commontransport).read_loose_ref("HEAD")
+            except KeyError:
+                pass
+            else:
+                refs_container._refs["HEAD"] = head
+        else:
+            refs_container = TransportRefsContainer(
+                    self._commontransport, self._controltransport)
+        super(TransportRepo, self).__init__(object_store,
+                refs_container)
+
+    def controldir(self):
+        return self._controltransport.local_abspath('.')
+
+    def commondir(self):
+        return self._commontransport.local_abspath('.')
+
+    @property
+    def path(self):
+        return self.transport.local_abspath('.')
+
+    def _determine_file_mode(self):
+        # Be consistent with bzr
+        if sys.platform == 'win32':
+            return False
+        return True

     def get_named_file(self, path):
         """Get a file from the control dir with a specific name.
@@ -312,6 +453,9 @@
         except NoSuchFile:
             return None

+    def _put_named_file(self, relpath, contents):
+        self._controltransport.put_bytes(relpath, contents)
+
     def index_path(self):
         """Return the path to the index file."""
         return self._controltransport.local_abspath(INDEX_FILENAME)
@@ -329,9 +473,53 @@
         # missing index file, which is treated as empty.
         return not self.bare

+    def get_config(self):
+        from dulwich.config import ConfigFile
+        try:
+            return ConfigFile.from_file(self._controltransport.get('config'))
+        except NoSuchFile:
+            return ConfigFile()
+
+    def get_config_stack(self):
+        from dulwich.config import StackedConfig
+        backends = []
+        p = self.get_config()
+        if p is not None:
+            backends.append(p)
+            writable = p
+        else:
+            writable = None
+        backends.extend(StackedConfig.default_backends())
+        return StackedConfig(backends, writable=writable)
+
     def __repr__(self):
         return "<%s for %r>" % (self.__class__.__name__, self.transport)

+    @classmethod
+    def init(cls, transport, bare=False):
+        if not bare:
+            try:
+                transport.mkdir(".git")
+            except FileExists:
+                raise AlreadyControlDirError(transport.base)
+            control_transport = transport.clone(".git")
+        else:
+            control_transport = transport
+        for d in BASE_DIRECTORIES:
+            try:
+                control_transport.mkdir("/".join(d))
+            except FileExists:
+                pass
+        try:
+            control_transport.mkdir(OBJECTDIR)
+        except FileExists:
+            raise AlreadyControlDirError(transport.base)
+        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
+        ret = cls(transport, bare)
+        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
+        ret._init_files(bare)
+        return ret
+

 class TransportObjectStore(PackBasedObjectStore):
     """Git-style object store that exists on disk."""
@@ -344,12 +532,54 @@
         super(TransportObjectStore, self).__init__()
         self.transport = transport
         self.pack_transport = self.transport.clone(PACKDIR)
+        self._alternates = None
+
+    def __eq__(self, other):
+        if not isinstance(other, TransportObjectStore):
+            return False
+        return self.transport == other.transport

     def __repr__(self):
         return "%s(%r)" % (self.__class__.__name__, self.transport)

-    def _pack_cache_stale(self):
-        return False # FIXME
+    @property
+    def alternates(self):
+        if self._alternates is not None:
+            return self._alternates
+        self._alternates = []
+        for path in self._read_alternate_paths():
+            # FIXME: Check path
+            t = _mod_transport.get_transport_from_path(path)
+            self._alternates.append(self.__class__(t))
+        return self._alternates
+
+    def _read_alternate_paths(self):
+        try:
+            f = self.transport.get("info/alternates")
+        except NoSuchFile:
+            return []
+        ret = []
+        try:
+            for l in f.read().splitlines():
+                if l[0] == "#":
+                    continue
+                if os.path.isabs(l):
+                    continue
+                ret.append(l)
+            return ret
+        finally:
+            f.close()
+
+    @property
+    def packs(self):
+        # FIXME: Never invalidates.
+        if not self._pack_cache:
+            self._update_pack_cache()
+        return self._pack_cache.values()
+
+    def _update_pack_cache(self):
+        for pack in self._load_packs():
+            self._pack_cache[pack._basename] = pack

     def _pack_names(self):
         try:
@@ -358,8 +588,7 @@
             return self.pack_transport.list_dir(".")
         else:
             ret = []
-            for line in f.readlines():
-                line = line.rstrip("\n")
+            for line in f.read().splitlines():
                 if not line:
                     continue
                 (kind, name) = line.split(" ", 1)
@@ -368,6 +597,10 @@
                 ret.append(name)
             return ret

+    def _remove_pack(self, pack):
+        self.pack_transport.delete(os.path.basename(pack.index.path))
+        self.pack_transport.delete(pack.data.filename)
+
     def _load_packs(self):
         ret = []
         for name in self._pack_names():
@@ -378,13 +611,14 @@
                     # FIXME: This reads the whole pack file at once
                     f = self.pack_transport.get(name)
                     contents = f.read()
-                    pd = PackData(name, StringIO(contents), size=len(contents))
+                    pd = PackData(name, BytesIO(contents), size=len(contents))
                 else:
                     pd = PackData(name, self.pack_transport.get(name),
                             size=size)
                 idxname = name.replace(".pack", ".idx")
                 idx = load_pack_index_file(idxname, self.pack_transport.get(idxname))
                 pack = Pack.from_objects(pd, idx)
+                pack._basename = idxname[:-4]
                 ret.append(pack)
         return ret

@@ -433,9 +667,10 @@
         :param path: Path to the pack file.
         """
         f.seek(0)
-        p = PackData(None, f, len(f.getvalue()))
+        p = PackData("", f, len(f.getvalue()))
         entries = p.sorted_entries()
         basename = "pack-%s" % iter_sha1(entry[0] for entry in entries)
+        p._filename = basename + ".pack"
         f.seek(0)
         self.pack_transport.put_file(basename + ".pack", f)
         idxfile = self.pack_transport.open_write_stream(basename + ".idx")
@@ -446,24 +681,10 @@
         idxfile = self.pack_transport.get(basename + ".idx")
         idx = load_pack_index_file(basename+".idx", idxfile)
         final_pack = Pack.from_objects(p, idx)
-        self._add_known_pack(final_pack)
+        final_pack._basename = basename
+        self._add_known_pack(basename, final_pack)
         return final_pack

-    def add_thin_pack(self):
-        """Add a new thin pack to this object store.
-
-        Thin packs are packs that contain deltas with parents that exist
-        in a different pack.
-        """
-        from cStringIO import StringIO
-        f = StringIO()
-        def commit():
-            if len(f.getvalue()) > 0:
-                return self.move_in_thin_pack(f)
-            else:
-                return None
-        return f, commit
-
     def move_in_thin_pack(self, f):
         """Move a specific file containing a pack into the pack directory.

@@ -473,46 +694,53 @@
         :param path: Path to the pack file.
         """
         f.seek(0)
-        data = ThinPackData.from_file(self.get_raw, f, len(f.getvalue()))
-        idx = MemoryPackIndex(data.sorted_entries(), data.get_stored_checksum())
-        p = Pack.from_objects(data, idx)
-
-        pack_sha = idx.objects_sha1()
-
-        datafile = self.pack_transport.open_write_stream("pack-%s.pack" % pack_sha)
+        p = Pack('', resolve_ext_ref=self.get_raw)
+        p._data = PackData.from_file(f, len(f.getvalue()))
+        p._data.pack = p
+        p._idx_load = lambda: MemoryPackIndex(p.data.sorted_entries(), p.data.get_stored_checksum())
+
+        pack_sha = p.index.objects_sha1()
+
+        datafile = self.pack_transport.open_write_stream(
+                "pack-%s.pack" % pack_sha)
         try:
-            entries, data_sum = write_pack_data(datafile, ((o, None) for o in p.iterobjects()), len(p))
+            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
         finally:
             datafile.close()
-        entries.sort()
-        idxfile = self.pack_transport.open_write_stream("pack-%s.idx" % pack_sha)
+        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
+        idxfile = self.pack_transport.open_write_stream(
+            "pack-%s.idx" % pack_sha)
         try:
-            write_pack_index_v2(idxfile, data.sorted_entries(), data_sum)
+            write_pack_index_v2(idxfile, entries, data_sum)
         finally:
             idxfile.close()
-        final_pack = Pack("pack-%s" % pack_sha)
-        self._add_known_pack(final_pack)
-        return final_pack
-
-
+        # TODO(jelmer): Just add new pack to the cache
+        self._flush_pack_cache()

     def add_pack(self):
-        """Add a new pack to this object store. 
+        """Add a new pack to this object store.

-        :return: Fileobject to write to and a commit function to 
+        :return: Fileobject to write to and a commit function to
             call when the pack is finished.
         """
-        from cStringIO import StringIO
-        f = StringIO()
+        f = BytesIO()
         def commit():
             if len(f.getvalue()) > 0:
                 return self.move_in_pack(f)
             else:
                 return None
-        return f, commit
+        def abort():
+            return None
+        return f, commit, abort

     @classmethod
     def init(cls, transport):
-        transport.mkdir('info')
-        transport.mkdir(PACKDIR)
+        try:
+            transport.mkdir('info')
+        except FileExists:
+            pass
+        try:
+            transport.mkdir(PACKDIR)
+        except FileExists:
+            pass
         return cls(transport)
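
A minimal usage sketch (not part of the revision above), assuming this version of the file is importable as breezy.plugins.git.transportgit under the Python 2 interpreter it targets; the path below is purely illustrative:

    from breezy.transport import get_transport
    from breezy.plugins.git.transportgit import TransportRepo

    # Any writable Breezy transport should work; a local path is simplest.
    t = get_transport('/tmp/example-repo')
    t.ensure_base()                             # make sure the base directory exists
    repo = TransportRepo.init(t, bare=False)    # creates .git/, the object store, and HEAD
    print(repo.controldir())                    # local path of the .git control directory
    print(repo.refs.read_loose_ref(b'HEAD'))    # -> ref: refs/heads/master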