/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2020-01-31 02:56:40 UTC
  • mto: (7290.42.5 work)
  • mto: This revision was merged to the branch mainline in revision 7476.
  • Revision ID: jelmer@jelmer.uk-20200131025640-njh60c73nvr551x8
Drop Serializer.write_revision.

# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from __future__ import absolute_import

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    read_packs_file,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    SYMREF,
    check_ref_format,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from .. import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ..errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ..lock import LogicalLockResult

class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs
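
    # For reference, a packed-refs file with peeled entries looks like:
    #
    #     # pack-refs with: peeled fully-peeled sorted
    #     <sha1> refs/tags/v1.0
    #     ^<peeled sha1>
    #
    # get_packed_refs() records the <sha1> for each ref name; the "^"
    # continuation lines fill _peeled_refs, which get_peeled() below consults.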
 
    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to
            point to a tag, this will be the SHA the ref refers to. If the
            ref may point to a tag, but no cached information is available,
            None is returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            try:
                header = f.read(len(SYMREF))
            except ReadError:
                # probably a directory
                return None
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        f = self.transport.open_write_stream("packed-refs")
        try:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)
        finally:
            f.close()

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)

 
# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")
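
# For reference, a ``.git`` file as read by read_gitfile() holds a single
# line such as:
#
#     gitdir: ../path/to/the/actual/control/dir
#
# and the function returns that path with the "gitdir: " prefix and the
# trailing newline stripped.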
 

class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref("HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs["HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret

 
class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l[0] == b"#":
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    def _update_pack_cache(self):
        pack_files = set()
        pack_dir_contents = self._pack_names()
        for name in pack_dir_contents:
            if name.startswith("pack-") and name.endswith(".pack"):
                # verify that idx exists first (otherwise the pack was not yet
                # fully written)
                idx_name = os.path.splitext(name)[0] + ".idx"
                if idx_name in pack_dir_contents:
                    pack_files.add(os.path.splitext(name)[0])

        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        try:
            return self.pack_transport.list_dir(".")
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                # Hmm, warn about running 'git update-server-info' ?
                return iter([])
            else:
                with f:
                    return read_packs_file(f)
        except NoSuchFile:
            return iter([])

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the pack.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        idxfile = self.pack_transport.open_write_stream(basename + ".idx")
        try:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        finally:
            idxfile.close()
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the thin pack.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        datafile = self.pack_transport.open_write_stream(
            "pack-%s.pack" % pack_sha.decode('ascii'))
        try:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        finally:
            datafile.close()
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        idxfile = self.pack_transport.open_write_stream(
            "pack-%s.idx" % pack_sha.decode('ascii'))
        try:
            write_pack_index_v2(idxfile, entries, data_sum)
        finally:
            idxfile.close()

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to, a commit function to call when the
            pack is finished, and an abort function.
        """
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)
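
A minimal usage sketch (illustrative only; the path is a placeholder, everything else uses names defined or imported in the file above): opening an existing non-bare repository over a Breezy transport and reading its refs.

from breezy.transport import get_transport_from_path
from breezy.git.transportgit import TransportRepo

# Placeholder location; any directory with a ".git" control dir would do.
t = get_transport_from_path('/tmp/example-checkout')
repo = TransportRepo(t, bare=False)

# Refs come from TransportRefsContainer: loose refs are read over the
# transport, packed refs from the packed-refs file.
print(repo.refs.allkeys())
print(repo.refs.read_loose_ref(b'HEAD'))

# Loose and packed objects are served by TransportObjectStore.
print(repo.object_store)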