/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2019-10-13 22:53:02 UTC
  • mfrom: (7290.1.35 work)
  • mto: This revision was merged to the branch mainline in revision 7405.
  • Revision ID: jelmer@jelmer.uk-20191013225302-vg88ztajzq05hkas
Merge lp:brz/3.0.

# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from __future__ import absolute_import

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    read_packs_file,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    SYMREF,
    check_ref_format,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from .. import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ..errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ..lock import LogicalLockResult


class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            header = f.read(len(SYMREF))
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)


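As an aside (not part of transportgit.py): a minimal sketch of driving this
refs container directly, assuming a local repository at the placeholder path
/path/to/repo with a .git control directory.

# Example usage (illustrative; not part of the original file):
from breezy.transport import get_transport_from_path

control = get_transport_from_path('/path/to/repo/.git')   # placeholder path
refs = TransportRefsContainer(control)
print(sorted(refs.allkeys()))          # e.g. [b'HEAD', b'refs/heads/master']
print(refs.read_loose_ref(b'HEAD'))    # e.g. b'ref: refs/heads/master'
# Point an extra ref at master; refs/heads/ is created on demand.
refs.set_symbolic_ref(b'refs/heads/alias', b'refs/heads/master')
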
# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")


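For illustration (not part of transportgit.py), read_gitfile above just checks
the "gitdir: " prefix and returns the remainder of the line as bytes:

# Example (illustrative; not part of the original file):
from io import BytesIO

assert read_gitfile(BytesIO(b"gitdir: ../repo/.git\n")) == b"../repo/.git"
# Content without the prefix is rejected:
#     read_gitfile(BytesIO(b"not a gitfile"))  -> raises ValueError
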
class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret


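A hedged sketch (not part of transportgit.py) of creating and reopening a
repository through a Breezy transport; the path is a placeholder and a local
filesystem transport is assumed:

# Example (illustrative; not part of the original file):
from breezy.transport import get_transport_from_path

t = get_transport_from_path('/tmp/example')    # placeholder; directory must exist
repo = TransportRepo.init(t, bare=False)       # creates /tmp/example/.git
print(repo.controldir())                       # local path of the control directory
print(repo.refs.read_loose_ref(b'HEAD'))       # b'ref: refs/heads/master'
# Reopening later goes through the constructor:
repo = TransportRepo(get_transport_from_path('/tmp/example'), bare=False)
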
class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l[0] == b"#":
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    def _update_pack_cache(self):
        pack_files = set()
        pack_dir_contents = self._pack_names()
        for name in pack_dir_contents:
            if name.startswith("pack-") and name.endswith(".pack"):
                # verify that idx exists first (otherwise the pack was not yet
                # fully written)
                idx_name = os.path.splitext(name)[0] + ".idx"
                if idx_name in pack_dir_contents:
                    pack_files.add(os.path.splitext(name)[0])

        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        try:
            return self.pack_transport.list_dir(".")
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                # Hmm, warn about running 'git update-server-info' ?
                return iter([])
            else:
                with f:
                    return read_packs_file(f)
        except NoSuchFile:
            return iter([])

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the pack data.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the thin pack data.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to, a commit function to call when the
            pack is finished, and an abort function.
        """
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)
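
Finally, a sketch (not part of transportgit.py) of using the object store on
its own; the path is a placeholder pointing at an existing .git/objects
directory reachable over a local transport.

# Example (illustrative; not part of the original file):
from breezy.transport import get_transport_from_path
from dulwich.objects import Blob

store = TransportObjectStore(
    get_transport_from_path('/tmp/example/.git/objects'))   # placeholder path
blob = Blob.from_string(b'hello world\n')
store.add_object(blob)      # stored as a loose object under <first two>/<rest>
assert blob.id in store     # found via loose objects, packs or alternates

# Packs go through the add_pack() protocol:
f, commit, abort = store.add_pack()
# ...write a pack stream into f, e.g. with dulwich.pack.write_pack_objects()...
commit()                    # moves the pack into pack/, or returns None if f is empty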