/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/plugins/git/transportgit.py

  • Committer: Breezy landing bot
  • Author(s): Martin
  • Date: 2018-07-15 09:48:59 UTC
  • mfrom: (7038.1.5 btree_serializer_pyx_py3)
  • Revision ID: breezy.the.bot@gmail.com-20180715094859-2kha7mnbwhd0op7s
Make _btree_serializer_pyx module work on Python 3

Merged from https://code.launchpad.net/~gz/brz/btree_serializer_pyx_py3/+merge/349624
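
Once the branch is available locally, this merge revision can be examined directly by the revision ID shown above; something along these lines should work with brz (or bzr), though exact option spellings may vary between versions:

brz log -r revid:breezy.the.bot@gmail.com-20180715094859-2kha7mnbwhd0op7s
brz diff -c revid:breezy.the.bot@gmail.com-20180715094859-2kha7mnbwhd0op7s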

 
# Copyright (C) 2010-2018 Jelmer Vernooij <jelmer@jelmer.uk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A Git repository implementation that uses a Bazaar transport."""

from __future__ import absolute_import

from io import BytesIO

import os
import sys

from dulwich.errors import (
    NotGitRepository,
    NoIndexPresent,
    )
from dulwich.file import (
    GitFile,
    FileLocked,
    )
from dulwich.objects import (
    ShaFile,
    )
from dulwich.object_store import (
    PackBasedObjectStore,
    PACKDIR,
    )
from dulwich.pack import (
    MemoryPackIndex,
    PackData,
    Pack,
    iter_sha1,
    load_pack_index_file,
    write_pack_objects,
    write_pack_index_v2,
    )
from dulwich.repo import (
    BaseRepo,
    InfoRefsContainer,
    RefsContainer,
    BASE_DIRECTORIES,
    COMMONDIR,
    CONTROLDIR,
    INDEX_FILENAME,
    OBJECTDIR,
    REFSDIR,
    SYMREF,
    check_ref_format,
    read_gitfile,
    read_packed_refs_with_peeled,
    read_packed_refs,
    write_packed_refs,
    )

from ... import (
    osutils,
    transport as _mod_transport,
    urlutils,
    )
from ...sixish import (
    PY3,
    text_type,
    )
from ...errors import (
    AlreadyControlDirError,
    FileExists,
    LockBroken,
    LockError,
    LockContention,
    NotLocalUrl,
    NoSuchFile,
    ReadError,
    TransportNotPossible,
    )

from ...lock import LogicalLockResult


class TransportRefsContainer(RefsContainer):
    """Refs container that reads refs from a transport."""

    def __init__(self, transport, worktree_transport=None):
        self.transport = transport
        if worktree_transport is None:
            worktree_transport = transport
        self.worktree_transport = worktree_transport
        self._packed_refs = None
        self._peeled_refs = None

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    def _ensure_dir_exists(self, path):
        for n in range(path.count(b"/")):
            dirname = b"/".join(path.split(b"/")[:n+1])
            try:
                self.transport.mkdir(dirname)
            except FileExists:
                pass

    def subkeys(self, base):
        """Refs present in this container under a base.

        :param base: The base to return refs under.
        :return: A set of valid refs in this container under the base; the base
            prefix is stripped from the ref names returned.
        """
        keys = set()
        base_len = len(base) + 1
        for refname in self.allkeys():
            if refname.startswith(base):
                keys.add(refname[base_len:])
        return keys

    def allkeys(self):
        keys = set()
        try:
            self.worktree_transport.get_bytes("HEAD")
        except NoSuchFile:
            pass
        else:
            keys.add(b"HEAD")
        try:
            iter_files = list(self.transport.clone("refs").iter_files_recursive())
            for filename in iter_files:
                unquoted_filename = urlutils.unquote(filename)
                if PY3:
                    # JRV: Work around unquote returning a text_type string on
                    # PY3.
                    unquoted_filename = unquoted_filename.encode('utf-8')
                refname = osutils.pathjoin(b"refs", unquoted_filename)
                if check_ref_format(refname):
                    keys.add(refname)
        except (TransportNotPossible, NoSuchFile):
            pass
        keys.update(self.get_packed_refs())
        return keys

    def get_packed_refs(self):
        """Get contents of the packed-refs file.

        :return: Dictionary mapping ref names to SHA1s

        :note: Will return an empty dictionary when no packed-refs file is
            present.
        """
        # TODO: invalidate the cache on repacking
        if self._packed_refs is None:
            # set both to empty because we want _peeled_refs to be
            # None if and only if _packed_refs is also None.
            self._packed_refs = {}
            self._peeled_refs = {}
            try:
                f = self.transport.get("packed-refs")
            except NoSuchFile:
                return {}
            try:
                first_line = next(iter(f)).rstrip()
                if (first_line.startswith(b"# pack-refs") and b" peeled" in
                        first_line):
                    for sha, name, peeled in read_packed_refs_with_peeled(f):
                        self._packed_refs[name] = sha
                        if peeled:
                            self._peeled_refs[name] = peeled
                else:
                    f.seek(0)
                    for sha, name in read_packed_refs(f):
                        self._packed_refs[name] = sha
            finally:
                f.close()
        return self._packed_refs

    def get_peeled(self, name):
        """Return the cached peeled value of a ref, if available.

        :param name: Name of the ref to peel
        :return: The peeled value of the ref. If the ref is known not to point
            to a tag, this will be the SHA the ref refers to. If the ref may
            point to a tag, but no cached information is available, None is
            returned.
        """
        self.get_packed_refs()
        if self._peeled_refs is None or name not in self._packed_refs:
            # No cache: no peeled refs were read, or this ref is loose
            return None
        if name in self._peeled_refs:
            return self._peeled_refs[name]
        else:
            # Known not peelable
            return self[name]

    def read_loose_ref(self, name):
        """Read a reference file and return its contents.

        If the reference file is a symbolic reference, only read the first
        line of the file. Otherwise, only read the first 40 bytes.

        :param name: the refname to read, relative to refpath
        :return: The contents of the ref file, or None if the file does not
            exist.
        :raises IOError: if any other error occurs
        """
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            f = transport.get(name)
        except NoSuchFile:
            return None
        with f:
            header = f.read(len(SYMREF))
            if header == SYMREF:
                # Read only the first line
                return header + next(iter(f)).rstrip(b"\r\n")
            else:
                # Read only the first 40 bytes
                return header + f.read(40-len(SYMREF))

    def _remove_packed_ref(self, name):
        if self._packed_refs is None:
            return
        # reread cached refs from disk, while holding the lock

        self._packed_refs = None
        self.get_packed_refs()

        if name not in self._packed_refs:
            return

        del self._packed_refs[name]
        if name in self._peeled_refs:
            del self._peeled_refs[name]
        f = self.transport.open_write_stream("packed-refs")
        try:
            write_packed_refs(f, self._packed_refs, self._peeled_refs)
        finally:
            f.close()

    def set_symbolic_ref(self, name, other):
        """Make a ref point at another ref.

        :param name: Name of the ref to set
        :param other: Name of the ref to point at
        """
        self._check_refname(name)
        self._check_refname(other)
        if name != b'HEAD':
            transport = self.transport
            self._ensure_dir_exists(name)
        else:
            transport = self.worktree_transport
        transport.put_bytes(name, SYMREF + other + b'\n')

    def set_if_equals(self, name, old_ref, new_ref):
        """Set a refname to new_ref only if it currently equals old_ref.

        This method follows all symbolic references, and can be used to perform
        an atomic compare-and-swap operation.

        :param name: The refname to set.
        :param old_ref: The old sha the refname must refer to, or None to set
            unconditionally.
        :param new_ref: The new sha the refname will refer to.
        :return: True if the set was successful, False otherwise.
        """
        try:
            realnames, _ = self.follow(name)
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(realname)
        transport.put_bytes(realname, new_ref+b"\n")
        return True

    def add_if_new(self, name, ref):
        """Add a new reference only if it does not already exist.

        This method follows symrefs, and only ensures that the last ref in the
        chain does not exist.

        :param name: The refname to set.
        :param ref: The new sha the refname will refer to.
        :return: True if the add was successful, False otherwise.
        """
        try:
            realnames, contents = self.follow(name)
            if contents is not None:
                return False
            realname = realnames[-1]
        except (KeyError, IndexError):
            realname = name
        self._check_refname(realname)
        if realname == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
            self._ensure_dir_exists(realname)
        transport.put_bytes(realname, ref+b"\n")
        return True

    def remove_if_equals(self, name, old_ref):
        """Remove a refname only if it currently equals old_ref.

        This method does not follow symbolic references. It can be used to
        perform an atomic compare-and-delete operation.

        :param name: The refname to delete.
        :param old_ref: The old sha the refname must refer to, or None to delete
            unconditionally.
        :return: True if the delete was successful, False otherwise.
        """
        self._check_refname(name)
        # may only be packed
        if name == b'HEAD':
            transport = self.worktree_transport
        else:
            transport = self.transport
        try:
            transport.delete(name)
        except NoSuchFile:
            pass
        self._remove_packed_ref(name)
        return True

    def get(self, name, default=None):
        try:
            return self[name]
        except KeyError:
            return default

    def unlock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        lockname = name + b".lock"
        try:
            self.transport.delete(lockname)
        except NoSuchFile:
            pass

    def lock_ref(self, name):
        if name == b"HEAD":
            transport = self.worktree_transport
        else:
            transport = self.transport
        self._ensure_dir_exists(name)
        lockname = name + b".lock"
        try:
            local_path = self.transport.local_abspath(name)
        except NotLocalUrl:
            # This is racy, but what can we do?
            if self.transport.has(lockname):
                raise LockContention(name)
            lock_result = self.transport.put_bytes(lockname, b'Locked by brz-git')
            return LogicalLockResult(lambda: self.transport.delete(lockname))
        else:
            try:
                gf = GitFile(local_path, 'wb')
            except FileLocked as e:
                raise LockContention(name, e)
            else:
                def unlock():
                    try:
                        self.transport.delete(lockname)
                    except NoSuchFile:
                        raise LockBroken(lockname)
                    # GitFile.abort doesn't care if the lock has already disappeared
                    gf.abort()
                return LogicalLockResult(unlock)


class TransportRepo(BaseRepo):

    def __init__(self, transport, bare, refs_text=None):
        self.transport = transport
        self.bare = bare
        try:
            with transport.get(CONTROLDIR) as f:
                path = read_gitfile(f)
        except (ReadError, NoSuchFile):
            if self.bare:
                self._controltransport = self.transport
            else:
                self._controltransport = self.transport.clone('.git')
        else:
            self._controltransport = self.transport.clone(path)
        commondir = self.get_named_file(COMMONDIR)
        if commondir is not None:
            with commondir:
                commondir = os.path.join(
                    self.controldir(),
                    commondir.read().rstrip(b"\r\n").decode(
                        sys.getfilesystemencoding()))
                self._commontransport = \
                    _mod_transport.get_transport_from_path(commondir)
        else:
            self._commontransport = self._controltransport
        object_store = TransportObjectStore(
            self._commontransport.clone(OBJECTDIR))
        if refs_text is not None:
            refs_container = InfoRefsContainer(BytesIO(refs_text))
            try:
                head = TransportRefsContainer(self._commontransport).read_loose_ref("HEAD")
            except KeyError:
                pass
            else:
                refs_container._refs["HEAD"] = head
        else:
            refs_container = TransportRefsContainer(
                    self._commontransport, self._controltransport)
        super(TransportRepo, self).__init__(object_store,
                refs_container)

    def controldir(self):
        return self._controltransport.local_abspath('.')

    def commondir(self):
        return self._commontransport.local_abspath('.')

    @property
    def path(self):
        return self.transport.local_abspath('.')

    def _determine_file_mode(self):
        # Be consistent with bzr
        if sys.platform == 'win32':
            return False
        return True

    def get_named_file(self, path):
        """Get a file from the control dir with a specific name.

        Although the filename should be interpreted as a filename relative to
        the control dir in a disk-backed Repo, the object returned need not be
        pointing to a file in that location.

        :param path: The path to the file, relative to the control dir.
        :return: An open file object, or None if the file does not exist.
        """
        try:
            return self._controltransport.get(path.lstrip('/'))
        except NoSuchFile:
            return None

    def _put_named_file(self, relpath, contents):
        self._controltransport.put_bytes(relpath, contents)

    def index_path(self):
        """Return the path to the index file."""
        return self._controltransport.local_abspath(INDEX_FILENAME)

    def open_index(self):
        """Open the index for this repository."""
        from dulwich.index import Index
        if not self.has_index():
            raise NoIndexPresent()
        return Index(self.index_path())

    def has_index(self):
        """Check if an index is present."""
        # Bare repos must never have index files; non-bare repos may have a
        # missing index file, which is treated as empty.
        return not self.bare

    def get_config(self):
        from dulwich.config import ConfigFile
        try:
            with self._controltransport.get('config') as f:
                return ConfigFile.from_file(f)
        except NoSuchFile:
            return ConfigFile()

    def get_config_stack(self):
        from dulwich.config import StackedConfig
        backends = []
        p = self.get_config()
        if p is not None:
            backends.append(p)
            writable = p
        else:
            writable = None
        backends.extend(StackedConfig.default_backends())
        return StackedConfig(backends, writable=writable)

    def __repr__(self):
        return "<%s for %r>" % (self.__class__.__name__, self.transport)

    @classmethod
    def init(cls, transport, bare=False):
        if not bare:
            try:
                transport.mkdir(".git")
            except FileExists:
                raise AlreadyControlDirError(transport.base)
            control_transport = transport.clone(".git")
        else:
            control_transport = transport
        for d in BASE_DIRECTORIES:
            try:
                control_transport.mkdir("/".join(d))
            except FileExists:
                pass
        try:
            control_transport.mkdir(OBJECTDIR)
        except FileExists:
            raise AlreadyControlDirError(transport.base)
        TransportObjectStore.init(control_transport.clone(OBJECTDIR))
        ret = cls(transport, bare)
        ret.refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
        ret._init_files(bare)
        return ret


class TransportObjectStore(PackBasedObjectStore):
    """Git-style object store that exists on disk."""

    def __init__(self, transport):
        """Open an object store.

        :param transport: Transport to open data from
        """
        super(TransportObjectStore, self).__init__()
        self.transport = transport
        self.pack_transport = self.transport.clone(PACKDIR)
        self._alternates = None

    def __eq__(self, other):
        if not isinstance(other, TransportObjectStore):
            return False
        return self.transport == other.transport

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.transport)

    @property
    def alternates(self):
        if self._alternates is not None:
            return self._alternates
        self._alternates = []
        for path in self._read_alternate_paths():
            # FIXME: Check path
            t = _mod_transport.get_transport_from_path(path)
            self._alternates.append(self.__class__(t))
        return self._alternates

    def _read_alternate_paths(self):
        try:
            f = self.transport.get("info/alternates")
        except NoSuchFile:
            return []
        ret = []
        with f:
            for l in f.read().splitlines():
                if l.startswith(b"#"):
                    continue
                if os.path.isabs(l):
                    continue
                ret.append(l)
            return ret

    @property
    def packs(self):
        # FIXME: Never invalidates.
        if not self._pack_cache:
            self._update_pack_cache()
        return self._pack_cache.values()

    def _update_pack_cache(self):
        for pack in self._load_packs():
            self._pack_cache[pack._basename] = pack

    def _pack_names(self):
        try:
            f = self.transport.get('info/packs')
        except NoSuchFile:
            return self.pack_transport.list_dir(".")
        else:
            with f:
                ret = []
                for line in f.read().splitlines():
                    if not line:
                        continue
                    (kind, name) = line.split(b" ", 1)
                    if kind != b"P":
                        continue
                    ret.append(name)
                return ret

    def _remove_pack(self, pack):
        self.pack_transport.delete(os.path.basename(pack.index.path))
        self.pack_transport.delete(pack.data.filename)

    def _load_packs(self):
        ret = []
        for name in self._pack_names():
            if name.startswith("pack-") and name.endswith(".pack"):
                try:
                    size = self.pack_transport.stat(name).st_size
                except TransportNotPossible:
                    contents = self.pack_transport.get_bytes(name)
                    pd = PackData(name, BytesIO(contents), size=len(contents))
                else:
                    pd = PackData(name, self.pack_transport.get(name),
                            size=size)
                idxname = name.replace(".pack", ".idx")
                idx = load_pack_index_file(idxname, self.pack_transport.get(idxname))
                pack = Pack.from_objects(pd, idx)
                pack._basename = idxname[:-4]
                ret.append(pack)
        return ret

    def _iter_loose_objects(self):
        for base in self.transport.list_dir('.'):
            if len(base) != 2:
                continue
            for rest in self.transport.list_dir(base):
                yield base+rest

    def _split_loose_object(self, sha):
        return (sha[:2], sha[2:])

    def _remove_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        self.transport.delete(path)

    def _get_loose_object(self, sha):
        path = osutils.joinpath(self._split_loose_object(sha))
        try:
            with self.transport.get(path) as f:
                return ShaFile.from_file(f)
        except NoSuchFile:
            return None

    def add_object(self, obj):
        """Add a single object to this object store.

        :param obj: Object to add
        """
        (dir, file) = self._split_loose_object(obj.id)
        try:
            self.transport.mkdir(dir)
        except FileExists:
            pass
        path = osutils.pathjoin(dir, file)
        if self.transport.has(path):
            return # Already there, no need to write again
        self.transport.put_bytes(path, obj.as_legacy_object())

    def move_in_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the pack.
        """
        f.seek(0)
        p = PackData("", f, len(f.getvalue()))
        entries = p.sorted_entries()
        basename = "pack-%s" % iter_sha1(entry[0] for entry in entries).decode('ascii')
        p._filename = basename + ".pack"
        f.seek(0)
        self.pack_transport.put_file(basename + ".pack", f)
        idxfile = self.pack_transport.open_write_stream(basename + ".idx")
        try:
            write_pack_index_v2(idxfile, entries, p.get_stored_checksum())
        finally:
            idxfile.close()
        idxfile = self.pack_transport.get(basename + ".idx")
        idx = load_pack_index_file(basename+".idx", idxfile)
        final_pack = Pack.from_objects(p, idx)
        final_pack._basename = basename
        self._add_known_pack(basename, final_pack)
        return final_pack

    def move_in_thin_pack(self, f):
        """Move a specific file containing a pack into the pack directory.

        :note: The file should be on the same file system as the
            packs directory.

        :param f: Open file object for the thin pack.
        """
        f.seek(0)
        p = Pack('', resolve_ext_ref=self.get_raw)
        p._data = PackData.from_file(f, len(f.getvalue()))
        p._data.pack = p
        p._idx_load = lambda: MemoryPackIndex(p.data.sorted_entries(), p.data.get_stored_checksum())

        pack_sha = p.index.objects_sha1()

        datafile = self.pack_transport.open_write_stream(
                "pack-%s.pack" % pack_sha.decode('ascii'))
        try:
            entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
        finally:
            datafile.close()
        entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
        idxfile = self.pack_transport.open_write_stream(
            "pack-%s.idx" % pack_sha.decode('ascii'))
        try:
            write_pack_index_v2(idxfile, entries, data_sum)
        finally:
            idxfile.close()
        # TODO(jelmer): Just add new pack to the cache
        self._flush_pack_cache()

    def add_pack(self):
        """Add a new pack to this object store.

        :return: Fileobject to write to, a commit function to call when the
            pack is finished, and an abort function.
        """
        f = BytesIO()
        def commit():
            if len(f.getvalue()) > 0:
                return self.move_in_pack(f)
            else:
                return None
        def abort():
            return None
        return f, commit, abort

    @classmethod
    def init(cls, transport):
        try:
            transport.mkdir('info')
        except FileExists:
            pass
        try:
            transport.mkdir(PACKDIR)
        except FileExists:
            pass
        return cls(transport)
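
For orientation, here is a minimal usage sketch of the classes above. It is not part of the revision: the path is hypothetical, and it assumes the breezy.plugins.git plugin is importable as a regular Python package.

# Sketch only: open an existing, non-bare Git checkout through a Breezy transport.
from breezy.transport import get_transport_from_path
from breezy.plugins.git.transportgit import TransportRepo

t = get_transport_from_path('/path/to/git/checkout')  # hypothetical local path
repo = TransportRepo(t, bare=False)           # locates .git via the transport
print(repo.controldir())                      # absolute path of the control directory
print(sorted(repo.refs.allkeys()))            # refs found by TransportRefsContainer
print(repo.object_store)                      # TransportObjectStore over the objects dir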