/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2019-11-03 12:38:45 UTC
  • mto: This revision was merged to the branch mainline in revision 7413.
  • Revision ID: jelmer@jelmer.uk-20191103123845-5726o8n89u0i5bjw
Fix tests.

# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from __future__ import absolute_import

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    read_packs_file,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    SYMREF,
    check_ref_format,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from .. import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ..errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ..lock import LogicalLockResult
from ..trace import warning


class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs
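
    # Illustrative note (not part of the original module): a packed-refs file
    # as parsed above looks roughly like this, with <sha> standing in for a
    # 40-character hexadecimal object id:
    #
    #     # pack-refs with: peeled fully-peeled
    #     <sha> refs/heads/master
    #     <sha> refs/tags/v1.0
    #     ^<sha>
    #
    # A line starting with "^" carries the peeled value of the preceding tag
    # ref, which is what populates self._peeled_refs above.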
 

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            header = f.read(len(SYMREF))
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))
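
    # Illustrative note (not part of the original module): a loose ref file
    # contains either a symbolic reference, e.g.
    #
    #     ref: refs/heads/master
    #
    # or a 40-character hexadecimal object id. read_loose_ref() above returns
    # the symbolic form as a single line and truncates the plain form to its
    # first 40 bytes.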
 

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)

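
# Illustrative sketch (not part of the original module): listing refs through
# the container above. The path is hypothetical; any transport pointing at a
# git control directory works.
def _example_list_refs(path='/path/to/repo/.git'):
    t = _mod_transport.get_transport_from_path(path)
    refs = TransportRefsContainer(t)
    # allkeys() combines HEAD, loose refs found under refs/ and the entries
    # of packed-refs, as byte strings such as b'refs/heads/master'.
    return sorted(refs.allkeys())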
 
 
# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")
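

# Illustrative sketch (not part of the original module): read_gitfile() only
# needs a bytes file-like object, so it can be exercised in memory:
#
#     >>> read_gitfile(BytesIO(b"gitdir: ../.git/worktrees/feature\n"))
#     b'../.git/worktrees/feature'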
 


class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret

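
# Illustrative sketch (not part of the original module): creating a repository
# over a transport and re-opening it. The directory is hypothetical and is
# assumed to exist and be empty.
def _example_init_repo(path='/tmp/example-repo'):
    t = _mod_transport.get_transport_from_path(path)
    repo = TransportRepo.init(t, bare=False)   # lays out the .git/ directory
    # init() points HEAD at refs/heads/master through a symbolic ref.
    assert repo.refs.read_loose_ref(b'HEAD') == SYMREF + b'refs/heads/master'
    # The same control directory can simply be re-opened later:
    return TransportRepo(t, bare=False)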
 
 
class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on a transport."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l.startswith(b"#"):
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret
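
    # Illustrative note (not part of the original module): "info/alternates"
    # is a plain-text file naming additional object stores, one path per
    # line, e.g.:
    #
    #     ../../shared-repo/.git/objects
    #
    # _read_alternate_paths() above skips lines starting with "#" as well as
    # absolute paths; the alternates property wraps each remaining entry in
    # another TransportObjectStore.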
 

    def _update_pack_cache(self):
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was not yet
                    # fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                warning('No info/packs on remote host; '
                        'run \'git update-server-info\' on remote.')
            else:
                with f:
                    pack_files = [
                        os.path.splitext(name)[0]
                        for name in read_packs_file(f)]
        except NoSuchFile:
            pass
        return pack_files

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the pack data.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the thin pack.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to and a commit function to
            call when the pack is finished.
        """
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort
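
    # Illustrative note (not part of the original module): callers are
    # expected to write a complete pack stream into the returned file object
    # and then invoke the commit callback, roughly:
    #
    #     f, commit, abort = store.add_pack()
    #     try:
    #         write_pack_objects(f, [(obj, None) for obj in objects])
    #     except BaseException:
    #         abort()
    #         raise
    #     else:
    #         commit()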
 

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)
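

# Illustrative sketch (not part of the original module): storing and reading
# back a loose object. The directory is hypothetical and assumed to exist;
# dulwich's Blob type is used purely for illustration.
def _example_store_blob(path='/tmp/example-objects'):
    from dulwich.objects import Blob
    t = _mod_transport.get_transport_from_path(path)
    store = TransportObjectStore.init(t)   # creates the info/ and pack/ dirs
    blob = Blob.from_string(b'hello world\n')
    store.add_object(blob)                 # written out as a loose object
    return store[blob.id].data             # -> b'hello world\n'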