/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar


Viewing changes to breezy/git/transportgit.py

  • Committer: Breezy landing bot
  • Author(s): Jelmer Vernooij
  • Date: 2018-11-16 18:59:44 UTC
  • mfrom: (7143.15.15 more-cleanups)
  • Revision ID: breezy.the.bot@gmail.com-20181116185944-biefv1sub37qfybm
Sprinkle some PEP8iness.

Merged from https://code.launchpad.net/~jelmer/brz/more-cleanups/+merge/358611
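The cleanups are mostly mechanical PEP 8 fixes: spaces around binary operators and long lines wrapped inside their existing parentheses. Two representative before/after pairs, taken directly from the hunks below:

    # before
    dirname = "/".join(path.split("/")[:n+1])
    iter_files = list(self.transport.clone("refs").iter_files_recursive())

    # after
    dirname = "/".join(path.split("/")[:n + 1])
    iter_files = list(self.transport.clone(
        "refs").iter_files_recursive())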

=== modified file 'breezy/git/transportgit.py'
@@ -97,7 +97,7 @@
 
     def _ensure_dir_exists(self, path):
         for n in range(path.count("/")):
-            dirname = "/".join(path.split("/")[:n+1])
+            dirname = "/".join(path.split("/")[:n + 1])
             try:
                 self.transport.mkdir(dirname)
             except FileExists:
@@ -126,7 +126,8 @@
         else:
             keys.add(b"HEAD")
         try:
-            iter_files = list(self.transport.clone("refs").iter_files_recursive())
+            iter_files = list(self.transport.clone(
+                "refs").iter_files_recursive())
             for filename in iter_files:
                 unquoted_filename = urlutils.unquote_to_bytes(filename)
                 refname = osutils.pathjoin(b"refs", unquoted_filename)
@@ -175,9 +176,9 @@
         """Return the cached peeled value of a ref, if available.
 
         :param name: Name of the ref to peel
-        :return: The peeled value of the ref. If the ref is known not point to a
-            tag, this will be the SHA the ref refers to. If the ref may point to
-            a tag, but no cached information is available, None is returned.
+        :return: The peeled value of the ref. If the ref is known not point to
+            a tag, this will be the SHA the ref refers to. If the ref may point
+            to a tag, but no cached information is available, None is returned.
         """
         self.get_packed_refs()
         if self._peeled_refs is None or name not in self._packed_refs:
@@ -215,7 +216,7 @@
                 return header + next(iter(f)).rstrip(b"\r\n")
             else:
                 # Read only the first 40 bytes
-                return header + f.read(40-len(SYMREF))
+                return header + f.read(40 - len(SYMREF))
 
     def _remove_packed_ref(self, name):
         if self._packed_refs is None:
@@ -250,7 +251,8 @@
             self._ensure_dir_exists(urlutils.quote_from_bytes(name))
         else:
             transport = self.worktree_transport
-        transport.put_bytes(urlutils.quote_from_bytes(name), SYMREF + other + b'\n')
+        transport.put_bytes(urlutils.quote_from_bytes(
+            name), SYMREF + other + b'\n')
 
     def set_if_equals(self, name, old_ref, new_ref):
         """Set a refname to new_ref only if it currently equals old_ref.
@@ -274,7 +276,8 @@
         else:
             transport = self.transport
             self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
-        transport.put_bytes(urlutils.quote_from_bytes(realname), new_ref+b"\n")
+        transport.put_bytes(urlutils.quote_from_bytes(
+            realname), new_ref + b"\n")
         return True
 
     def add_if_new(self, name, ref):
@@ -300,7 +303,7 @@
         else:
             transport = self.transport
             self._ensure_dir_exists(urlutils.quote_from_bytes(realname))
-        transport.put_bytes(urlutils.quote_from_bytes(realname), ref+b"\n")
+        transport.put_bytes(urlutils.quote_from_bytes(realname), ref + b"\n")
         return True
 
     def remove_if_equals(self, name, old_ref):
@@ -310,8 +313,8 @@
         perform an atomic compare-and-delete operation.
 
         :param name: The refname to delete.
-        :param old_ref: The old sha the refname must refer to, or None to delete
-            unconditionally.
+        :param old_ref: The old sha the refname must refer to, or None to
+            delete unconditionally.
         :return: True if the delete was successful, False otherwise.
         """
         self._check_refname(name)
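The set_if_equals and remove_if_equals docstrings above describe compare-and-swap style updates on refs. A minimal sketch of that contract, using a plain dict in place of the transport-backed ref store (the helper names and the sample ref are illustrative only, not Breezy API):

    def set_if_equals(refs, name, old_ref, new_ref):
        """Point refs[name] at new_ref only if it still equals old_ref."""
        if refs.get(name) != old_ref:
            return False          # the ref changed under us; caller decides what to do
        refs[name] = new_ref
        return True

    def remove_if_equals(refs, name, old_ref):
        """Delete refs[name] if it still equals old_ref; old_ref=None deletes unconditionally."""
        if old_ref is not None and refs.get(name) != old_ref:
            return False
        refs.pop(name, None)
        return True

    refs = {b"refs/heads/master": b"a" * 40}
    assert set_if_equals(refs, b"refs/heads/master", b"a" * 40, b"b" * 40)
    assert not remove_if_equals(refs, b"refs/heads/master", b"a" * 40)  # stale old_ref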
@@ -340,7 +343,7 @@
             transport = self.transport
         lockname = name + b".lock"
         try:
-            self.transport.delete(urlutils.quote_from_bytes(lockname))
+            transport.delete(urlutils.quote_from_bytes(lockname))
         except NoSuchFile:
             pass
 
@@ -352,13 +355,14 @@
         self._ensure_dir_exists(urlutils.quote_from_bytes(name))
         lockname = urlutils.quote_from_bytes(name + b".lock")
         try:
-            local_path = self.transport.local_abspath(urlutils.quote_from_bytes(name))
+            local_path = transport.local_abspath(
+                urlutils.quote_from_bytes(name))
         except NotLocalUrl:
             # This is racy, but what can we do?
-            if self.transport.has(lockname):
+            if transport.has(lockname):
                 raise LockContention(name)
-            lock_result = self.transport.put_bytes(lockname, b'Locked by brz-git')
-            return LogicalLockResult(lambda: self.transport.delete(lockname))
+            transport.put_bytes(lockname, b'Locked by brz-git')
+            return LogicalLockResult(lambda: transport.delete(lockname))
         else:
             try:
                 gf = GitFile(local_path, 'wb')
@@ -367,10 +371,11 @@
             else:
                 def unlock():
                     try:
-                        self.transport.delete(lockname)
+                        transport.delete(lockname)
                     except NoSuchFile:
                         raise LockBroken(lockname)
-                    # GitFile.abort doesn't care if the lock has already disappeared
+                    # GitFile.abort doesn't care if the lock has already
+                    # disappeared
                     gf.abort()
                 return LogicalLockResult(unlock)
 
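Note that the lock_ref hunks above are not purely cosmetic: the transport chosen earlier (worktree vs. control transport) now replaces the hard-coded self.transport. The locking scheme itself is a plain '<name>.lock' marker file: contention if it already exists, deleted again on unlock. A rough stdlib-only sketch of that pattern (not the Breezy transport API; the path is made up), using O_EXCL where the remote-transport branch above has to settle for a racy has() check:

    import os

    class LockContention(Exception):
        pass

    def lock_ref(path):
        """Create '<path>.lock'; fail if someone else already holds it."""
        lockname = path + ".lock"
        try:
            # O_EXCL makes creation fail atomically if the lock file exists.
            fd = os.open(lockname, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        except FileExistsError:
            raise LockContention(path)
        with os.fdopen(fd, "wb") as f:
            f.write(b"Locked by brz-git")
        return lambda: os.remove(lockname)   # call to unlock

    unlock = lock_ref("/tmp/example-ref")
    unlock()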
@@ -405,7 +410,8 @@
             else:
                 self._controltransport = self.transport.clone('.git')
         else:
-            self._controltransport = self.transport.clone(urlutils.quote_from_bytes(path))
+            self._controltransport = self.transport.clone(
+                urlutils.quote_from_bytes(path))
         commondir = self.get_named_file(COMMONDIR)
         if commondir is not None:
             with commondir:
@@ -422,16 +428,17 @@
         if refs_text is not None:
             refs_container = InfoRefsContainer(BytesIO(refs_text))
             try:
-                head = TransportRefsContainer(self._commontransport).read_loose_ref("HEAD")
+                head = TransportRefsContainer(
+                    self._commontransport).read_loose_ref("HEAD")
             except KeyError:
                 pass
             else:
                 refs_container._refs["HEAD"] = head
         else:
             refs_container = TransportRefsContainer(
-                    self._commontransport, self._controltransport)
+                self._commontransport, self._controltransport)
         super(TransportRepo, self).__init__(object_store,
-                refs_container)
+                                            refs_container)
 
     def controldir(self):
         return self._controltransport.local_abspath('.')
@@ -621,12 +628,13 @@
                     size = self.pack_transport.stat(name).st_size
                 except TransportNotPossible:
                     f = self.pack_transport.get(name)
-                    pd = PackData(name, f, size=len(contents))
+                    pd = PackData(name, f)
                 else:
                     pd = PackData(name, self.pack_transport.get(name),
-                            size=size)
+                                  size=size)
                 idxname = name.replace(".pack", ".idx")
-                idx = load_pack_index_file(idxname, self.pack_transport.get(idxname))
+                idx = load_pack_index_file(
+                    idxname, self.pack_transport.get(idxname))
                 pack = Pack.from_objects(pd, idx)
                 pack._basename = idxname[:-4]
                 ret.append(pack)
@@ -637,7 +645,7 @@
             if len(base) != 2:
                 continue
             for rest in self.transport.list_dir(base):
-                yield (base+rest).encode(sys.getfilesystemencoding())
+                yield (base + rest).encode(sys.getfilesystemencoding())
 
     def _split_loose_object(self, sha):
         return (sha[:2], sha[2:])
@@ -666,7 +674,7 @@
             pass
         path = urlutils.quote_from_bytes(osutils.pathjoin(dir, file))
         if self.transport.has(path):
-            return # Already there, no need to write again
+            return  # Already there, no need to write again
         self.transport.put_bytes(path, obj.as_legacy_object())
 
     def move_in_pack(self, f):
@@ -680,7 +688,8 @@
         f.seek(0)
         p = PackData("", f, len(f.getvalue()))
         entries = p.sorted_entries()
-        basename = "pack-%s" % iter_sha1(entry[0] for entry in entries).decode('ascii')
+        basename = "pack-%s" % iter_sha1(entry[0]
+                                         for entry in entries).decode('ascii')
         p._filename = basename + ".pack"
         f.seek(0)
         self.pack_transport.put_file(basename + ".pack", f)
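move_in_pack derives the pack file name from a SHA-1 over the sorted object names. A simplified, self-contained stand-in for that naming step (iter_sha1_sketch and the sample entries are invented for the example; the real code uses dulwich's iter_sha1 helper):

    import hashlib

    def iter_sha1_sketch(names):
        """Hex SHA-1 over the concatenated object names, returned as ASCII bytes."""
        ctx = hashlib.sha1()
        for name in names:
            ctx.update(name)
        return ctx.hexdigest().encode('ascii')

    # entry[0] is the object's name (its sha); everything else is ignored here.
    entries = [(b"2b1e" * 10, 12, 0), (b"a94a" * 10, 580, 0)]
    basename = "pack-%s" % iter_sha1_sketch(entry[0]
                                            for entry in entries).decode('ascii')
    print(basename)   # pack-<40 hex digits>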
@@ -690,7 +699,7 @@
         finally:
             idxfile.close()
         idxfile = self.pack_transport.get(basename + ".idx")
-        idx = load_pack_index_file(basename+".idx", idxfile)
+        idx = load_pack_index_file(basename + ".idx", idxfile)
         final_pack = Pack.from_objects(p, idx)
         final_pack._basename = basename
         self._add_known_pack(basename, final_pack)
@@ -708,12 +717,13 @@
         p = Pack('', resolve_ext_ref=self.get_raw)
         p._data = PackData.from_file(f, len(f.getvalue()))
         p._data.pack = p
-        p._idx_load = lambda: MemoryPackIndex(p.data.sorted_entries(), p.data.get_stored_checksum())
+        p._idx_load = lambda: MemoryPackIndex(
+            p.data.sorted_entries(), p.data.get_stored_checksum())
 
         pack_sha = p.index.objects_sha1()
 
         datafile = self.pack_transport.open_write_stream(
-                "pack-%s.pack" % pack_sha.decode('ascii'))
+            "pack-%s.pack" % pack_sha.decode('ascii'))
         try:
             entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
         finally:
@@ -735,11 +745,13 @@
             call when the pack is finished.
         """
         f = BytesIO()
+
         def commit():
             if len(f.getvalue()) > 0:
                 return self.move_in_pack(f)
             else:
                 return None
+
         def abort():
             return None
         return f, commit, abort
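add_pack() hands the caller a buffer plus commit/abort callables, and commit() only moves a pack in when something was actually written. A rough self-contained mirror of that protocol (the wrapper function and the pack bytes are invented for the example):

    from io import BytesIO

    def add_pack_sketch():
        """Return (buffer, commit, abort), mirroring the protocol in the hunk above."""
        f = BytesIO()

        def commit():
            if len(f.getvalue()) > 0:
                return f.getvalue()   # the real commit() hands the buffer to move_in_pack()
            return None

        def abort():
            return None

        return f, commit, abort

    f, commit, abort = add_pack_sketch()
    f.write(b"PACK\x00\x00\x00\x02")   # caller streams raw pack data into the buffer
    assert commit() is not None        # non-empty buffer, so the pack would be moved in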