/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2020-09-02 16:35:18 UTC
  • mto: (7490.40.109 work)
  • mto: This revision was merged to the branch mainline in revision 7526.
  • Revision ID: jelmer@jelmer.uk-20200902163518-sy9f4unbboljphgu
Handle duplicate directories entries for git.

Show diffs side-by-side

added added

removed removed

Lines of Context:
 
1
# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
 
2
#
 
3
# This program is free software; you can redistribute it and/or modify
 
4
# it under the terms of the GNU General Public License as published by
 
5
# the Free Software Foundation; either version 2 of the License, or
 
6
# (at your option) any later version.
 
7
#
 
8
# This program is distributed in the hope that it will be useful,
 
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
11
# GNU General Public License for more details.
 
12
#
 
13
# You should have received a copy of the GNU General Public License
 
14
# along with this program; if not, write to the Free Software
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
16
 
 
17
"""A Git repository implementation that uses a Bazaar transport."""
 
18
 
 
19
from __future__ import absolute_import
 
20
 
 
21
from io import BytesIO
 
22
 
 
23
import os
 
24
import sys
 
25
 
 
26
from dulwich.errors import (
 
27
    NoIndexPresent,
 
28
    )
 
29
from dulwich.file import (
 
30
    GitFile,
 
31
    FileLocked,
 
32
    )
 
33
from dulwich.objects import (
 
34
    ShaFile,
 
35
    )
 
36
from dulwich.object_store import (
 
37
    PackBasedObjectStore,
 
38
    PACKDIR,
 
39
    read_packs_file,
 
40
    )
 
41
from dulwich.pack import (
 
42
    MemoryPackIndex,
 
43
    PackData,
 
44
    Pack,
 
45
    iter_sha1,
 
46
    load_pack_index_file,
 
47
    write_pack_objects,
 
48
    write_pack_index_v2,
 
49
    )
 
50
from dulwich.repo import (
 
51
    BaseRepo,
 
52
    InfoRefsContainer,
 
53
    RefsContainer,
 
54
    BASE_DIRECTORIES,
 
55
    COMMONDIR,
 
56
    CONTROLDIR,
 
57
    INDEX_FILENAME,
 
58
    OBJECTDIR,
 
59
    SYMREF,
 
60
    check_ref_format,
 
61
    read_packed_refs_with_peeled,
 
62
    read_packed_refs,
 
63
    write_packed_refs,
 
64
    )
 
65
 
 
66
from .. import (
 
67
    osutils,
 
68
    transport as _mod_transport,
 
69
    urlutils,
 
70
    )
 
71
from ..errors import (
 
72
    AlreadyControlDirError,
 
73
    FileExists,
 
74
    LockBroken,
 
75
    LockContention,
 
76
    NotLocalUrl,
 
77
    NoSuchFile,
 
78
    ReadError,
 
79
    TransportNotPossible,
 
80
    )
 
81
 
 
82
from ..lock import LogicalLockResult
 
83
from ..trace import warning
 
84
 
 
85
 
 
86
class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport.

    Loose refs live as files on the transport ("HEAD" on the worktree
    transport, everything else on the control transport); packed refs are
    read from the "packed-refs" file and cached in memory.
    """

    def __init__(self, transport, worktree_transport=None):
        """Create a refs container.

        :param transport: Transport for the shared control directory
            (holds ``refs/`` and ``packed-refs``).
        :param worktree_transport: Transport for per-worktree files such
            as HEAD; defaults to ``transport`` (bare repository case).
        """
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        # Caches for the packed-refs file; None means "not read yet".
        # _peeled_refs is None if and only if _packed_refs is None.
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        # Create every intermediate directory of `path` (but not path
        # itself), ignoring directories that already exist.
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        # +1 strips the "/" separator following the base as well.
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        """Return the set of all ref names, loose and packed.

        HEAD is included only if it exists on the worktree transport.
        Transports that cannot list files recursively simply contribute
        no loose refs (packed refs are still returned).
        """
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                # Transport paths are URL-quoted text; ref names are bytes.
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                # The "# pack-refs ... peeled" header line announces that
                # peeled entries (^<sha> lines) may follow.
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    # No header: rewind and parse as plain packed refs.
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not point to
            a tag, this will be the SHA the ref refers to. If the ref may point
            to a tag, but no cached information is available, None is returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file a symbolic reference, only read the first line of
        the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        # HEAD is per-worktree; every other ref is in the control dir.
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            try:
                header = f.read(len(SYMREF))
            except ReadError:
                # probably a directory
                return None
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        # Drop `name` from packed-refs and rewrite the file.
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            # Parent directories (e.g. refs/heads/) may not exist yet.
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        # NOTE(review): despite the docstring, old_ref is never compared
        # here — the write is unconditional and always returns True.
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                # The (followed) ref already has a value.
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        # NOTE(review): old_ref is never compared here either — the delete
        # is unconditional and always returns True.
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        """Return the value of a ref, or *default* if it does not exist."""
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        """Remove the lock file for *name*, if present."""
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        """Lock a ref and return a lock result whose unlock releases it.

        Uses GitFile-style ``<name>.lock`` files when the transport maps
        to a local path; otherwise falls back to a (racy) probe-then-put
        over the transport.

        :raises LockContention: if the ref is already locked.
        """
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)
 
384
 
 
385
 
 
386
# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
 
387
# rather than bytes..
 
388
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    prefix = b"gitdir: "
    contents = f.read()
    if not contents.startswith(prefix):
        raise ValueError("Expected file to start with 'gitdir: '")
    return contents[len(prefix):].rstrip(b"\n")
 
400
 
 
401
 
 
402
class TransportRepo(BaseRepo):
    """A Git repository that does all of its I/O through a transport."""

    def __init__(self, transport, bare, refs_text=None):
        """Open a repository on a transport.

        :param transport: Transport pointing at the repository root.
        :param bare: Whether this is a bare repository (no ``.git``
            subdirectory).
        :param refs_text: Optional ``info/refs`` content; when given,
            refs are served from it instead of from the transport.
        """
        self.transport = transport
        self.bare = bare
        # A ".git" *file* (gitdir: ...) redirects the control dir, as
        # used by worktrees and submodules.
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            # ReadError: ".git" is a directory, not a file.
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        # A "commondir" file points a linked worktree at the shared
        # control directory.
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        config = self.get_config()
        object_store = TransportObjectStore.from_config(
            self._commontransport.clone(OBJECTDIR),
            config)
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            # info/refs never includes HEAD; graft the loose HEAD on.
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        """Return the local path of the control directory.

        :raises NotLocalUrl: if the transport is not local.
        """
        return self._controltransport.local_abspath('.')

    def commondir(self):
        """Return the local path of the shared (common) control directory."""
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        # Local filesystem path of the repository root.
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-baked Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        # Write a control-dir file (e.g. "description") atomically enough
        # for our purposes via the transport.
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        """Return the repository's ConfigFile (empty if no config file)."""
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        """Return the stacked config (repo config over system defaults)."""
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    # Here for compatibility with dulwich < 0.19.17
    def generate_pack_data(self, have, want, progress=None, ofs_delta=None):
        """Generate pack data objects for a set of wants/haves.

        Args:
          have: List of SHA1s of objects that should not be sent
          want: List of SHA1s of objects that should be sent
          ofs_delta: Whether OFS deltas can be included
          progress: Optional progress reporting method
        """
        shallow = self.get_shallow()
        # Only pass shallow= when there are shallow commits, for
        # compatibility with object stores that lack the parameter.
        if shallow:
            return self.object_store.generate_pack_data(
                have, want, shallow=shallow,
                progress=progress, ofs_delta=ofs_delta)
        else:
            return self.object_store.generate_pack_data(
                have, want, progress=progress, ofs_delta=ofs_delta)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        """Create a new git repository on *transport*.

        :param transport: Transport at the desired repository root.
        :param bare: Whether to create a bare repository.
        :return: The opened TransportRepo.
        :raises AlreadyControlDirError: if a repository already exists here.
        """
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            # An existing objects/ directory means a repo is already here.
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret
 
565
 
 
566
 
 
567
class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport,
                 loose_compression_level=-1, pack_compression_level=-1):
        """Open an object store.

        :param transport: Transport to open data from
        :param loose_compression_level: zlib compression level for loose
            objects; -1 means "use the default".
        :param pack_compression_level: zlib compression level for packs;
            -1 means "use the default".
        """
        super(TransportObjectStore, self).__init__()
        self.pack_compression_level = pack_compression_level
        self.loose_compression_level = loose_compression_level
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    @classmethod
    def from_config(cls, path, config):
        """Create a store with compression levels from a git config.

        :param path: Transport for the object directory.
        :param config: dulwich config object to read core.compression,
            core.looseCompression and core.packCompression from.
        """
        try:
            default_compression_level = int(config.get(
                (b'core', ), b'compression').decode())
        except KeyError:
            default_compression_level = -1
        try:
            loose_compression_level = int(config.get(
                (b'core', ), b'looseCompression').decode())
        except KeyError:
            loose_compression_level = default_compression_level
        try:
            # Use a bytes key for consistency with the lookups above.
            pack_compression_level = int(config.get(
                (b'core', ), b'packCompression').decode())
        except KeyError:
            pack_compression_level = default_compression_level
        return cls(path, loose_compression_level, pack_compression_level)

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable; confirm no caller uses these as dict keys.
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        """Object stores listed in info/alternates, opened lazily."""
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        """Read relative alternate paths from info/alternates.

        :return: List of paths (bytes); comment lines, blank lines and
            absolute paths are skipped.
        """
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for line in f.read().splitlines():
                if not line:
                    continue
                # line is bytes: indexing it yields an int on Python 3,
                # so compare the prefix with startswith instead of line[0].
                if line.startswith(b"#"):
                    continue
                if os.path.isabs(line):
                    continue
                ret.append(line)
            return ret

    def _update_pack_cache(self):
        """Sync the in-memory pack cache with the packs on the transport.

        :return: List of newly discovered Pack objects.
        """
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        """List pack basenames, via list_dir or info/packs as fallback.

        Only packs whose .idx is present are returned (a missing index
        means the pack was not yet fully written).
        """
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was not
                    # yet fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            # Dumb remote: fall back to the info/packs listing.
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                warning('No info/packs on remote host;'
                        ' run \'git update-server-info\' on remote.')
            else:
                with f:
                    pack_files = [
                        os.path.splitext(name)[0]
                        for name in read_packs_file(f)]
        except NoSuchFile:
            # No pack directory at all: no packs.
            pass
        return pack_files

    def _remove_pack(self, pack):
        """Delete a pack (data + index) and drop it from the cache."""
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        """Yield the SHA (as bytes) of every loose object."""
        for base in self.transport.list_dir('.'):
            # Loose object fan-out dirs are exactly two hex chars; this
            # also skips "info" and "pack".
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        # "abcdef..." -> ("ab", "cdef...") fan-out layout.
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        """Return the loose object for *sha*, or None if not present."""
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        # Backwards compatibility with Dulwich < 0.20, which doesn't support
        # the compression_level parameter.
        if self.loose_compression_level not in (-1, None):
            raw_string = obj.as_legacy_object(
                compression_level=self.loose_compression_level)
        else:
            raw_string = obj.as_legacy_object()
        self.transport.put_bytes(path, raw_string)

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open seekable file object containing the pack data.
        :return: The finalized Pack object.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        # Name the pack after the SHA1 of the sorted object ids, as git does.
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Complete a thin pack and write it into the pack directory.

        External (base) objects are resolved via self.get_raw so the
        written pack is self-contained.

        :param f: Open seekable file object containing the thin pack.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to and a commit function to
            call when the pack is finished.
        """
        f = BytesIO()

        def commit():
            # An empty buffer means nothing was written; don't create
            # an empty pack.
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        """Create the object-store directory layout on *transport*."""
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)