/brz/remove-bazaar

To get this branch, use:
bzr branch http://gegoxaren.bato24.eu/bzr/brz/remove-bazaar

Viewing changes to breezy/knit.py

  • Committer: Martin
  • Date: 2017-05-25 01:35:55 UTC
  • mto: This revision was merged to the branch mainline in revision 6637.
  • Revision ID: gzlist@googlemail.com-20170525013555-lepzczdnzb9r272j
Apply 2to3 next fixer and make compatible
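
For context, the 2to3 "next" fixer rewrites calls to an iterator's Python 2 .next() method into calls to the next() builtin, which exists on Python 2.6+ as well as Python 3, so the result runs on both. A minimal before/after sketch of the pattern (illustrative only, not code from this revision):

    # Python 2 only: iterators expose a .next() method.
    stream = iter(['a', 'b', 'c'])
    record = stream.next()

    # Portable form produced by the fixer: the next() builtin works on
    # Python 2.6+ and Python 3, and accepts an optional default value.
    stream = iter(['a', 'b', 'c'])
    record = next(stream)
    missing = next(iter([]), None)  # returns None instead of raising StopIteration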

=== modified file 'breezy/knit.py'
@@ -191,8 +191,8 @@
         delta = self._annotate_factory.parse_line_delta(contents, rec[1],
             plain=True)
         compression_parent = factory.parents[0]
-        basis_entry = self._basis_vf.get_record_stream(
-            [compression_parent], 'unordered', True).next()
+        basis_entry = next(self._basis_vf.get_record_stream(
+            [compression_parent], 'unordered', True))
         if basis_entry.storage_kind == 'absent':
             raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
         basis_chunks = basis_entry.get_bytes_as('chunked')
@@ -227,8 +227,8 @@
         delta = self._plain_factory.parse_line_delta(contents, rec[1])
         compression_parent = factory.parents[0]
         # XXX: string splitting overhead.
-        basis_entry = self._basis_vf.get_record_stream(
-            [compression_parent], 'unordered', True).next()
+        basis_entry = next(self._basis_vf.get_record_stream(
+            [compression_parent], 'unordered', True))
         if basis_entry.storage_kind == 'absent':
             raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
         basis_chunks = basis_entry.get_bytes_as('chunked')
@@ -619,7 +619,6 @@
         """
         result = []
         lines = iter(lines)
-        next = lines.next
 
         cache = {}
         def cache_and_return(line):
@@ -632,12 +631,13 @@
         if plain:
             for header in lines:
                 start, end, count = [int(n) for n in header.split(',')]
-                contents = [next().split(' ', 1)[1] for i in xrange(count)]
+                contents = [next(lines).split(' ', 1)[1] for _ in range(count)]
                 result.append((start, end, count, contents))
         else:
             for header in lines:
                 start, end, count = [int(n) for n in header.split(',')]
-                contents = [tuple(next().split(' ', 1)) for i in xrange(count)]
+                contents = [tuple(next(lines).split(' ', 1))
+                    for _ in range(count)]
                 result.append((start, end, count, contents))
         return result
 
@@ -652,12 +652,11 @@
         Only the actual content lines.
         """
         lines = iter(lines)
-        next = lines.next
         for header in lines:
             header = header.split(',')
             count = int(header[2])
             for i in xrange(count):
-                origin, text = next().split(' ', 1)
+                origin, text = next(lines).split(' ', 1)
                 yield text
 
     def lower_fulltext(self, content):
@@ -738,12 +737,11 @@
         Only the actual content lines.
         """
         lines = iter(lines)
-        next = lines.next
         for header in lines:
             header = header.split(',')
             count = int(header[2])
             for i in xrange(count):
-                yield next()
+                yield next(lines)
 
     def lower_fulltext(self, content):
         return content.text()
@@ -1967,7 +1965,7 @@
             raw_records = self._access.get_raw_records(needed_offsets)
 
         for key, index_memo in records:
-            data = raw_records.next()
+            data = next(raw_records)
             yield key, data
 
     def _record_to_data(self, key, digest, lines, dense_lines=None):
@@ -2024,7 +2022,7 @@
         # Note that _get_content is only called when the _ContentMapGenerator
         # has been constructed with just one key requested for reconstruction.
         if key in self.nonlocal_keys:
-            record = self.get_record_stream().next()
+            record = next(self.get_record_stream())
             # Create a content object on the fly
             lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
             return PlainKnitContent(lines, record.key)
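
The hunks above all follow the same recipe: drop the local next = lines.next alias, pass the iterator to the next() builtin at each call site, and switch xrange and the throwaway loop variable to their Python 3 forms where a line is touched anyway. A self-contained sketch of the header-then-payload parsing loop in that portable style (simplified, not the real knit record format):

    def parse_blocks(lines):
        # Each block is a 'start,end,count' header followed by count payload lines.
        lines = iter(lines)
        result = []
        for header in lines:
            start, end, count = [int(n) for n in header.split(',')]
            # next(lines) advances the same iterator the for loop reads from,
            # so the loop resumes at the following header; works on Python 2.6+ and 3.
            contents = [next(lines) for _ in range(count)]
            result.append((start, end, count, contents))
        return result

    # Example: one block whose header announces two payload lines.
    print(parse_blocks(['0,2,2', 'first line', 'second line']))
    # -> [(0, 2, 2, ['first line', 'second line'])]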