/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/git/transportgit.py

  • Committer: John Arbash Meinel
  • Date: 2006-04-25 15:05:42 UTC
  • mfrom: (1185.85.85 bzr-encoding)
  • mto: This revision was merged to the branch mainline in revision 1752.
  • Revision ID: john@arbash-meinel.com-20060425150542-c7b518dca9928691
[merge] the old bzr-encoding changes, reparenting them on bzr.dev

# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from __future__ import absolute_import

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    read_packs_file,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    SYMREF,
    check_ref_format,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from .. import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ..errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ..lock import LogicalLockResult
from ..trace import warning


class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to
            point to a tag, this will be the SHA the ref refers to. If the
            ref may point to a tag, but no cached information is available,
            None is returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            try:
                header = f.read(len(SYMREF))
            except ReadError:
                # probably a directory
                return None
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)
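
# Usage sketch (illustrative; the repository path is hypothetical): a
# TransportRefsContainer reads loose and packed refs straight from a Breezy
# transport rooted at the Git control directory.
#
#     control = _mod_transport.get_transport_from_path('/path/to/repo/.git')
#     refs = TransportRefsContainer(control)
#     for name in sorted(refs.allkeys()):   # e.g. b'HEAD', b'refs/heads/master'
#         print(name)
#     refs.follow(b'HEAD')                  # resolve the symbolic ref chain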


# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")
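
# Usage sketch (illustrative; the input below is made up): read_gitfile()
# returns the path a ".git" file points at, e.g. for a linked worktree.
#
#     read_gitfile(BytesIO(b"gitdir: ../.git/worktrees/feature\n"))
#     # -> b'../.git/worktrees/feature'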


class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not
        be pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret
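
# Usage sketch (illustrative; the location is hypothetical): init() creates
# the ".git" layout (or a bare layout with bare=True) on a transport and
# returns the new repository, which can later be re-opened the same way.
#
#     t = _mod_transport.get_transport_from_path('/path/to/new-repo')
#     repo = TransportRepo.init(t, bare=False)
#     repo = TransportRepo(t, bare=False)   # re-open later
#     config = repo.get_config_stack()      # dulwich StackedConfig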


class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l.startswith(b"#"):
                    # Skip comment lines.
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    def _update_pack_cache(self):
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was not yet
                    # fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                warning('No info/packs on remote host; '
                        'run \'git update-server-info\' on remote.')
            else:
                with f:
                    pack_files = [
                        os.path.splitext(name)[0]
                        for name in read_packs_file(f)]
        except NoSuchFile:
            pass
        return pack_files

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the pack data.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a thin pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the thin pack data.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to, a commit function to call when the
            pack is finished, and an abort function.
        """
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)
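
# Usage sketch (illustrative; the path and SHA are placeholders): the object
# store lives under ".git/objects" and can also be used on its own.
#
#     objects = _mod_transport.get_transport_from_path('/path/to/repo/.git/objects')
#     store = TransportObjectStore(objects)
#     b'0123456789abcdef0123456789abcdef01234567' in store   # membership test
#     f, commit, abort = store.add_pack()   # write pack bytes to f, then commit()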