/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

« back to all changes in this revision

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2017-07-23 22:06:41 UTC
  • mfrom: (6738 trunk)
  • mto: This revision was merged to the branch mainline in revision 6739.
  • Revision ID: jelmer@jelmer.uk-20170723220641-69eczax9bmv8d6kk
Merge trunk, address review comments.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
2
 
#
3
 
# This program is free software; you can redistribute it and/or modify
4
 
# it under the terms of the GNU General Public License as published by
5
 
# the Free Software Foundation; either version 2 of the License, or
6
 
# (at your option) any later version.
7
 
#
8
 
# This program is distributed in the hope that it will be useful,
9
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11
 
# GNU General Public License for more details.
12
 
#
13
 
# You should have received a copy of the GNU General Public License
14
 
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
 
 
17
 
"""A Git repository implementation that uses a Bazaar transport."""
18
 
 
19
 
from io import BytesIO
20
 
 
21
 
import os
22
 
import sys
23
 
 
24
 
from dulwich.errors import (
25
 
    NoIndexPresent,
26
 
    )
27
 
from dulwich.file import (
28
 
    GitFile,
29
 
    FileLocked,
30
 
    )
31
 
from dulwich.objects import (
32
 
    ShaFile,
33
 
    )
34
 
from dulwich.object_store import (
35
 
    PackBasedObjectStore,
36
 
    PACKDIR,
37
 
    read_packs_file,
38
 
    )
39
 
from dulwich.pack import (
40
 
    MemoryPackIndex,
41
 
    PackData,
42
 
    Pack,
43
 
    iter_sha1,
44
 
    load_pack_index_file,
45
 
    write_pack_objects,
46
 
    write_pack_index_v2,
47
 
    )
48
 
from dulwich.repo import (
49
 
    BaseRepo,
50
 
    InfoRefsContainer,
51
 
    RefsContainer,
52
 
    BASE_DIRECTORIES,
53
 
    COMMONDIR,
54
 
    CONTROLDIR,
55
 
    INDEX_FILENAME,
56
 
    OBJECTDIR,
57
 
    SYMREF,
58
 
    check_ref_format,
59
 
    read_packed_refs_with_peeled,
60
 
    read_packed_refs,
61
 
    write_packed_refs,
62
 
    )
63
 
 
64
 
from .. import (
65
 
    osutils,
66
 
    transport as _mod_transport,
67
 
    urlutils,
68
 
    )
69
 
from ..errors import (
70
 
    AlreadyControlDirError,
71
 
    FileExists,
72
 
    LockBroken,
73
 
    LockContention,
74
 
    NotLocalUrl,
75
 
    NoSuchFile,
76
 
    ReadError,
77
 
    TransportNotPossible,
78
 
    )
79
 
 
80
 
from ..lock import LogicalLockResult
81
 
from ..trace import warning
82
 
 
83
 
 
84
 
class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        """Create a refs container.

        :param transport: Transport for the common git control directory
        :param worktree_transport: Optional transport for the worktree's
            control directory (where HEAD lives); defaults to ``transport``.
        """
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        # Caches for packed-refs; None means "not read yet".  Both are set
        # together in get_packed_refs so they are None iff unread.
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        # Create every parent directory of path, ignoring ones that already
        # exist (transport.mkdir is not recursive).
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        """Return all ref names known to this container.

        Combines HEAD (if present on the worktree transport), loose refs
        found under refs/, and refs from the packed-refs file.
        """
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                # Transport paths are URL-quoted text; ref names are bytes.
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            # Some transports can't list recursively; fall back to
            # packed refs only.
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                # The "# pack-refs ... peeled" header line determines which
                # parser to use; without it, rewind and parse plain refs.
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file a symbolic reference, only read the first line of
        the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        # HEAD lives on the worktree transport; everything else on the
        # common transport.
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            try:
                header = f.read(len(SYMREF))
            except ReadError:
                # probably a directory
                return None
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        # Drop a ref from the packed-refs file and rewrite it.
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock
        # NOTE(review): no lock is taken here; presumably the caller is
        # expected to hold it -- confirm.

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        # NOTE(review): old_ref is never actually compared against the
        # current value below, so this always writes unconditionally --
        # confirm whether that is intended.
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        # NOTE(review): as with set_if_equals, old_ref is not compared;
        # the deletion is unconditional -- confirm.
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        """Return the value of a ref, or ``default`` if it does not exist."""
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        """Remove the lock file for a ref, ignoring a missing lock."""
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        """Lock a ref by creating ``<name>.lock``.

        :param name: Ref name to lock
        :return: A LogicalLockResult whose unlock callable releases the lock
        :raises LockContention: if the ref is already locked
        """
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # Remote transport: no atomic lock primitive available.
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            # Local path: use dulwich's GitFile, which creates the .lock
            # file atomically.
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)
382
 
 
383
 
 
384
 
# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
385
 
# rather than bytes..
386
 
def read_gitfile(f):
    """Read a ``.git`` file.

    The file's contents must begin with ``gitdir: ``; the remainder,
    with any trailing newline removed, is the path to the real control
    directory.

    :param f: File-like object to read from
    :return: A path
    """
    prefix = b"gitdir: "
    contents = f.read()
    if not contents.startswith(prefix):
        raise ValueError("Expected file to start with 'gitdir: '")
    return contents[len(prefix):].rstrip(b"\n")
398
 
 
399
 
 
400
 
class TransportRepo(BaseRepo):
    """A Git repository accessed over a Bazaar transport."""

    def __init__(self, transport, bare, refs_text=None):
        """Open a repository.

        :param transport: Transport rooted at the working tree (or at the
            control directory for a bare repository)
        :param bare: Whether the repository is bare
        :param refs_text: Optional contents of an info/refs file to use
            instead of reading refs from the transport
        """
        self.transport = transport
        self.bare = bare
        # A ".git" *file* redirects to the real control directory
        # (worktrees / submodules); fall back to ".git" dir or the
        # transport root for bare repos.
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            # ReadError here: .git is a directory, not a file.
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        # A "commondir" file points at the shared control dir for
        # linked worktrees.
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        config = self.get_config()
        object_store = TransportObjectStore.from_config(
            self._commontransport.clone(OBJECTDIR),
            config)
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            # info/refs never lists HEAD; graft it on from the loose ref.
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        """Return the local path of the control directory.

        :raises NotLocalUrl: if the transport is not local
        """
        return self._controltransport.local_abspath('.')

    def commondir(self):
        """Return the local path of the common (shared) control directory."""
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        # Local filesystem path of the repository root.
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        """Probe the file-system to determine whether permissions can be trusted.

        :return: True if permissions can be trusted, False otherwise.
        """
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-baked Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        # Write a file into the control directory.
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        """Return the repository's ConfigFile (empty if no config file)."""
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        """Return a StackedConfig layering repo config over the defaults."""
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    # Here for compatibility with dulwich < 0.19.17
    def generate_pack_data(self, have, want, progress=None, ofs_delta=None):
        """Generate pack data objects for a set of wants/haves.

        Args:
          have: List of SHA1s of objects that should not be sent
          want: List of SHA1s of objects that should be sent
          ofs_delta: Whether OFS deltas can be included
          progress: Optional progress reporting method
        """
        shallow = self.get_shallow()
        if shallow:
            return self.object_store.generate_pack_data(
                have, want, shallow=shallow,
                progress=progress, ofs_delta=ofs_delta)
        else:
            return self.object_store.generate_pack_data(
                have, want, progress=progress, ofs_delta=ofs_delta)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        """Create a new repository at the transport's location.

        :param transport: Transport to create the repository on
        :param bare: Whether to create a bare repository
        :return: The new TransportRepo
        :raises AlreadyControlDirError: if a repository already exists there
        """
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            # An existing objects/ dir means there is already a repo here.
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret
563
 
 
564
 
 
565
 
class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport,
                 loose_compression_level=-1, pack_compression_level=-1):
        """Open an object store.

        :param transport: Transport to open data from
        :param loose_compression_level: zlib level for loose objects
            (-1 means the zlib default)
        :param pack_compression_level: zlib level for pack files
            (-1 means the zlib default)
        """
        super(TransportObjectStore, self).__init__()
        self.pack_compression_level = pack_compression_level
        self.loose_compression_level = loose_compression_level
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    @classmethod
    def from_config(cls, path, config):
        """Create an object store from a repository config.

        Reads core.compression, core.looseCompression and
        core.packCompression, falling back to -1 (zlib default).

        :param path: Transport for the objects directory
        :param config: A dulwich Config object
        """
        try:
            default_compression_level = int(config.get(
                (b'core', ), b'compression').decode())
        except KeyError:
            default_compression_level = -1
        try:
            loose_compression_level = int(config.get(
                (b'core', ), b'looseCompression').decode())
        except KeyError:
            loose_compression_level = default_compression_level
        try:
            # Use a bytes key for consistency with the other lookups above.
            pack_compression_level = int(config.get(
                (b'core', ), b'packCompression').decode())
        except KeyError:
            pack_compression_level = default_compression_level
        return cls(path, loose_compression_level, pack_compression_level)

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable; confirm no caller stores these in sets/dicts.
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        """Alternate object stores listed in info/alternates (cached)."""
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        """Yield the (relative) paths listed in info/alternates.

        :return: List of paths (bytes); empty if the file is absent.
        """
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                # BUG FIX: l is bytes, so l[0] is an int on Python 3 and
                # the old `l[0] == b"#"` comparison was always False;
                # comment lines were never skipped.
                if l.startswith(b"#"):
                    continue
                # NOTE(review): absolute paths are silently skipped here;
                # confirm that ignoring them (rather than supporting them)
                # is intended.
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    def _update_pack_cache(self):
        """Sync the in-memory pack cache with the packs on the transport.

        :return: List of newly discovered Pack objects.
        """
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        """Return the basenames of complete packs in the pack directory.

        Falls back to the info/packs listing when the transport cannot
        enumerate directories.
        """
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was not
                    # yet fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                # BUG FIX: the two adjacent literals previously concatenated
                # to "...host;run ..." with no separating space.
                warning('No info/packs on remote host; '
                        'run \'git update-server-info\' on remote.')
            else:
                with f:
                    pack_files = [
                        os.path.splitext(name)[0]
                        for name in read_packs_file(f)]
        except NoSuchFile:
            pass
        return pack_files

    def _remove_pack(self, pack):
        # Delete the pack's .idx and .pack files and drop it from the cache.
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        """Yield the hex SHAs (as bytes) of all loose objects."""
        for base in self.transport.list_dir('.'):
            # Loose object fan-out dirs are exactly two hex chars; this
            # also skips "pack" and "info".
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        # "abcdef..." -> ("ab", "cdef...") fan-out layout.
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        """Return the loose ShaFile for sha, or None if not present."""
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        # Backwards compatibility with Dulwich < 0.20, which doesn't support
        # the compression_level parameter.
        if self.loose_compression_level not in (-1, None):
            raw_string = obj.as_legacy_object(
                compression_level=self.loose_compression_level)
        else:
            raw_string = obj.as_legacy_object()
        self.transport.put_bytes(path, raw_string)

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the complete pack data.
        :return: The finalized Pack object.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        # Name the pack after the SHA1 of the sorted object ids, like git.
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(
                basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        A thin pack may reference objects outside itself; they are resolved
        via this store (``self.get_raw``) and a complete pack is written.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the thin pack data.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to and a commit function to
            call when the pack is finished.
        """
        f = BytesIO()

        def commit():
            # Only move the pack in if anything was actually written.
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        """Create the objects directory layout on the given transport.

        :param transport: Transport for the new objects directory
        :return: A TransportObjectStore for the new directory
        """
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)