/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/git/transportgit.py

  • Committer: Jelmer Vernooij
  • Date: 2017-05-21 12:41:27 UTC
  • mto: This revision was merged to the branch mainline in revision 6623.
  • Revision ID: jelmer@jelmer.uk-20170521124127-iv8etg0vwymyai6y
s/bzr/brz/ in apport config.

# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    read_packs_file,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    SYMREF,
    check_ref_format,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from .. import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ..errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ..lock import LogicalLockResult
from ..trace import warning


class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count("/")):
            dirname = "/".join(path.split("/")[:n + 1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone(
                "refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote_to_bytes(filename)
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs
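
    # Illustrative note (editorial, not part of the original file): the
    # "packed-refs" file parsed above uses the standard Git format, e.g.:
    #
    #   # pack-refs with: peeled fully-peeled
    #   0123456789abcdef0123456789abcdef01234567 refs/tags/v1.0
    #   ^fedcba9876543210fedcba9876543210fedcba98
    #
    # Lines beginning with "^" give the peeled commit for the preceding
    # annotated tag; those values end up in self._peeled_refs. The SHAs shown
    # are made up for the example.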

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            return None
        with f:
            try:
                header = f.read(len(SYMREF))
            except ReadError:
                # probably a directory
                return None
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40 - len(SYMREF))
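
    # Illustrative note (editorial, not part of the original file): a loose
    # ref read by read_loose_ref() is either a symref such as
    # b"ref: refs/heads/master" or a 40-byte hex object id such as
    # b"0123456789abcdef0123456789abcdef01234567" (example value).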

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        with self.transport.open_write_stream("packed-refs") as f:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        else:
            transport = self.worktree_transport
        transport.put_bytes(urlutils.quote_from_bytes(
            name), SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(
            realname), new_ref + b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to
            delete unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(urlutils.quote_from_bytes(name))
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            transport.delete(urlutils.quote_from_bytes(lockname))
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(urlutils.quote_from_bytes(name))
        lockname = urlutils.quote_from_bytes(name + b".lock")
        try:
            local_path = transport.local_abspath(
                urlutils.quote_from_bytes(name))
        except NotLocalUrl:
            # This is racy, but what can we do?
            if transport.has(lockname):
                raise LockContention(name)
            transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already
                    # disappeared
                    gf.abort()
                return LogicalLockResult(unlock)
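
    # Illustrative sketch (editorial, not part of the original file): a
    # caller typically takes the ref lock, updates the ref, then releases the
    # lock via the returned LogicalLockResult. "refs", "old_sha" and
    # "new_sha" below are hypothetical names.
    #
    #   lock = refs.lock_ref(b"refs/heads/master")
    #   try:
    #       refs.set_if_equals(b"refs/heads/master", old_sha, new_sha)
    #   finally:
    #       lock.unlock()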


# TODO(jelmer): Use upstream read_gitfile; unfortunately that expects strings
# rather than bytes..
def read_gitfile(f):
    """Read a ``.git`` file.

    The first line of the file should start with "gitdir: "

    :param f: File-like object to read from
    :return: A path
    """
    cs = f.read()
    if not cs.startswith(b"gitdir: "):
        raise ValueError("Expected file to start with 'gitdir: '")
    return cs[len(b"gitdir: "):].rstrip(b"\n")
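
# Illustrative example (editorial, not part of the original file): given a
# worktree's ".git" pointer file, read_gitfile returns the referenced path.
# The path shown is made up.
#
#   >>> read_gitfile(BytesIO(b"gitdir: ../repo/.git\n"))
#   b'../repo/.git'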


class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(
                urlutils.quote_from_bytes(path))
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(
                    self._commontransport).read_loose_ref(b"HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs[b"HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                                            refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret
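
    # Illustrative sketch (editorial, not part of the original file):
    # creating and then configuring a repository over a Breezy transport.
    # The path is hypothetical; breezy.transport.get_transport is assumed to
    # be available as in the rest of Breezy.
    #
    #   from breezy.transport import get_transport
    #   t = get_transport("/tmp/example-repo")
    #   repo = TransportRepo.init(t, bare=True)
    #   config = repo.get_config_stack()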


class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l[0] == b"#":
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    def _update_pack_cache(self):
        pack_files = set(self._pack_names())
        new_packs = []
        for basename in pack_files:
            pack_name = basename + ".pack"
            if basename not in self._pack_cache:
                try:
                    size = self.pack_transport.stat(pack_name).st_size
                except TransportNotPossible:
                    f = self.pack_transport.get(pack_name)
                    # TODO(jelmer): Don't read entire file into memory?
                    f = BytesIO(f.read())
                    pd = PackData(pack_name, f)
                else:
                    pd = PackData(
                        pack_name, self.pack_transport.get(pack_name),
                        size=size)
                idxname = basename + ".idx"
                idx = load_pack_index_file(
                    idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = basename
                self._pack_cache[basename] = pack
                new_packs.append(pack)
        # Remove disappeared pack files
        for f in set(self._pack_cache) - pack_files:
            self._pack_cache.pop(f).close()
        return new_packs

    def _pack_names(self):
        pack_files = []
        try:
            dir_contents = self.pack_transport.list_dir(".")
            for name in dir_contents:
                if name.startswith("pack-") and name.endswith(".pack"):
                    # verify that idx exists first (otherwise the pack was not yet
                    # fully written)
                    idx_name = os.path.splitext(name)[0] + ".idx"
                    if idx_name in dir_contents:
                        pack_files.append(os.path.splitext(name)[0])
        except TransportNotPossible:
            try:
                f = self.transport.get('info/packs')
            except NoSuchFile:
                warning('No info/packs on remote host; '
                        'run \'git update-server-info\' on remote.')
            else:
                with f:
                    pack_files = [
                        os.path.splitext(name)[0]
                        for name in read_packs_file(f)]
        except NoSuchFile:
            pass
        return pack_files

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)
        try:
            del self._pack_cache[os.path.basename(pack._basename)]
        except KeyError:
            pass

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield (base + rest).encode(sys.getfilesystemencoding())

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(urlutils.quote_from_bytes(path))

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(urlutils.quote_from_bytes(path)) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(urlutils.quote_from_bytes(dir))
        except FileExists:
            pass
        path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
        if self.transport.has(path):
            return  # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())
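
    # Illustrative sketch (editorial, not part of the original file): loose
    # objects are added as dulwich object instances. "store" is a
    # hypothetical TransportObjectStore.
    #
    #   from dulwich.objects import Blob
    #   blob = Blob.from_string(b"example contents")
    #   store.add_object(blob)   # writes objects/xx/xxxx... under the store
    #   assert store[blob.id].data == b"example contents"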

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the pack.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0]
                                         for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        with self.pack_transport.open_write_stream(basename + ".idx") as idxfile:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename + ".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_cached_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the thin pack.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(
            p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        with self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        with self.pack_transport.open_write_stream(
                "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
            write_pack_index_v2(idxfile, entries, data_sum)

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to and a commit function to
            call when the pack is finished.
        """
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None

        def abort():
            return None
        return f, commit, abort
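
    # Illustrative sketch (editorial, not part of the original file):
    # add_pack() returns a file object plus commit/abort callables, in the
    # style dulwich object stores use. "store" and "pack_bytes" are
    # hypothetical.
    #
    #   f, commit, abort = store.add_pack()
    #   try:
    #       f.write(pack_bytes)
    #       commit()
    #   except Exception:
    #       abort()
    #       raise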

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)