/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2019-03-04 01:31:43 UTC
  • mfrom: (7058.6.5 memorytree-symlinks)
  • mto: This revision was merged to the branch mainline in revision 7318.
  • Revision ID: jelmer@jelmer.uk-20190304013143-7euyjbmanwo3tpmn
More improvements, add tests.

# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from __future__ import absolute_import

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    read_packs_file,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    SYMREF,
    check_ref_format,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from .. import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ..errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ..lock import LogicalLockResult


class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            header = f.read(len(SYMREF))
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        f = self.transport.open_write_stream("packed-refs")
        try:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)
        finally:
            f.close()

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)


# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")


class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref("HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs["HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret


class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l[0] == b"#":
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    def _update_pack_cache(self):
        pack_files = set()
        pack_dir_contents = self._pack_names()
        for name in pack_dir_contents:
            if name.startswith("pack-") and name.endswith(".pack"):
                # verify that idx exists first (otherwise the pack was not yet
                # fully written)
                idx_name = os.path.splitext(name)[0] + ".idx"
                if idx_name in pack_dir_contents:
                    pack_files.add(os.path.splitext(name)[0])

        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        try:
            return self.pack_transport.list_dir(".")
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                # Hmm, warn about running 'git update-server-info' ?
                return iter([])
            else:
                with f:
                    return read_packs_file(f)
        except NoSuchFile:
            return iter([])

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the pack data.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        idxfile = self.pack_transport.open_write_stream(basename + ".idx")
        try:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        finally:
            idxfile.close()
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object containing the thin pack data.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        datafile = self.pack_transport.open_write_stream(
            "pack-%s.pack" % pack_sha.decode('ascii'))
        try:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        finally:
            datafile.close()
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        idxfile = self.pack_transport.open_write_stream(
            "pack-%s.idx" % pack_sha.decode('ascii'))
        try:
            write_pack_index_v2(idxfile, entries, data_sum)
        finally:
            idxfile.close()

    def add_pack(self):
        """Add a new pack to this object store.

        :return: File object to write to and a commit function to
            call when the pack is finished.
        """
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)
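
For orientation only: the sketch below is not part of the file or revision shown above. It illustrates roughly how TransportRepo might be opened over a Bazaar transport; the local path and the use of breezy.transport.get_transport are assumptions made for the example.

# Illustrative sketch only -- not part of breezy/git/transportgit.py.
# Open an existing non-bare Git checkout through a Bazaar transport
# and list its refs. The path below is a placeholder.
from breezy.transport import get_transport
from breezy.git.transportgit import TransportRepo

t = get_transport('/path/to/git/checkout')  # assumed local directory containing a .git
repo = TransportRepo(t, bare=False)         # resolves .git (or a gitdir: pointer) via the transport
for ref, sha in repo.get_refs().items():    # get_refs() is inherited from dulwich's BaseRepo
    print(ref.decode('utf-8'), sha.decode('ascii'))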