     revno = len(history)
     for revision_id in history:
         this_tree = repository.revision_tree(revision_id)
-        this_path = find_previous_path(last_tree, this_tree, last_path)
+        this_intertree = InterTree.get(this_tree, last_tree)
+        this_path = this_intertree.find_source_path(last_path)

         # now we know how it was last time, and how it is in this revision.
         # are those two states effectively the same or not?

     :param lf: The LogFormatter object showing the output.

-    :param specific_fileid: If not None, list only the commits affecting the
-        specified file, rather than all commits.

     :param verbose: If True show added/changed/deleted/renamed files.

     :param direction: 'reverse' (default) is latest to earliest; 'forward' is

     :param match: Dictionary of search lists to use when matching revision

-    # Convert old-style parameters to new-style parameters
-    if specific_fileid is not None:
-        file_ids = [specific_fileid]
-            delta_type = 'partial'
         delta_type = None
-            diff_type = 'partial'

+    if isinstance(start_revision, int):
+            start_revision = revisionspec.RevisionInfo(branch, start_revision)
+        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
+            raise errors.InvalidRevisionNumber(start_revision)

+    if isinstance(end_revision, int):
+            end_revision = revisionspec.RevisionInfo(branch, end_revision)
+        except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
+            raise errors.InvalidRevisionNumber(end_revision)

+    if end_revision is not None and end_revision.revno == 0:
+        raise errors.InvalidRevisionNumber(end_revision.revno)

     # Build the request and execute it
-    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
+    rqst = make_log_request_dict(
         start_revision=start_revision, end_revision=end_revision,
         limit=limit, message_search=search,
         delta_type=delta_type, diff_type=diff_type)

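A quick way to exercise this entry point from a script, as a minimal sketch only: it assumes a branch in the current directory with at least three revisions, and uses only names that appear in this module (log_formatter, show_log, make_log_request_dict, Logger).

    import sys

    from breezy.branch import Branch
    from breezy import log

    b = Branch.open('.')                      # assumption: a branch exists here
    lf = log.log_formatter('long', to_file=sys.stdout)

    # Convenience entry point; integer revnos are converted to RevisionInfo
    # by show_log() as in the hunk above.
    log.show_log(b, lf, verbose=True, start_revision=1, end_revision=3)

    # The request-dict path that show_log() builds internally.
    rqst = log.make_log_request_dict(limit=3, delta_type='full')
    log.Logger(b, rqst).show(lf)
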
         if not isinstance(lf, LogFormatter):
             warn("not a LogFormatter instance: %r" % lf)

-        self.branch.lock_read()
+        with self.branch.lock_read():
             if getattr(lf, 'begin_log', None):

             self._show_body(lf)
             if getattr(lf, 'end_log', None):

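The move from an explicit lock_read()/unlock pair to a context manager can be mirrored in callers too; a small sketch, assuming `b` is an open Branch:

    # Before: manual pairing, easy to leak a lock on an exception.
    b.lock_read()
    try:
        last_revid = b.last_revision()
    finally:
        b.unlock()

    # After: the lock object acts as a context manager and unlocks itself.
    with b.lock_read():
        last_revid = b.last_revision()
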
     def _show_body(self, lf):
         """Show the main log output.

                 lf.log_revision(lr)
             except errors.GhostRevisionUnusableHere:
                 raise errors.BzrCommandError(
                     gettext('Further revision history missing.'))

     def _generator_factory(self, branch, rqst):

         for revs in revision_iterator:
             for (rev_id, revno, merge_depth), rev, delta in revs:
                 # 0 levels means show everything; merge_depth counts from 0
-                if levels != 0 and merge_depth >= levels:
+                if (levels != 0 and merge_depth is not None and
+                        merge_depth >= levels):

                 if omit_merges and len(rev.parent_ids) > 1:

                     signature = format_signature_validity(rev_id, self.branch)

-                yield LogRevision(rev, revno, merge_depth, delta,
+                    rev, revno, merge_depth, delta,
                     self.rev_tag_dict.get(rev_id), diff, signature)

         path_encoding = get_diff_header_encoding()
         diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
                              new_label='', path_encoding=path_encoding)
         return s.getvalue()

     def _create_log_revision_iterator(self):

             # not a directory
             file_count = len(self.rqst.get('specific_fileids'))
             if file_count != 1:
-                raise BzrError("illegal LogRequest: must match-using-deltas "
+                raise errors.BzrError(
+                    "illegal LogRequest: must match-using-deltas "
                     "when logging %d files" % file_count)
             return self._log_revision_iterator_using_per_file_graph()

         generate_merge_revisions = rqst.get('levels') != 1
         delayed_graph_generation = not rqst.get('specific_fileids') and (
             rqst.get('limit') or self.start_rev_id or self.end_rev_id)
         view_revisions = _calc_view_revisions(
             self.branch, self.start_rev_id, self.end_rev_id,
             rqst.get('direction'),

         # Apply the other filters
         return make_log_rev_iterator(self.branch, view_revisions,
                                      rqst.get('delta_type'), rqst.get('match'),
                                      file_ids=rqst.get('specific_fileids'),
                                      direction=rqst.get('direction'))

     def _log_revision_iterator_using_per_file_graph(self):
         # Get the base revisions, filtering by the revision range.

         if not isinstance(view_revisions, list):
             view_revisions = list(view_revisions)
         view_revisions = _filter_revisions_touching_file_id(self.branch,
-            rqst.get('specific_fileids')[0], view_revisions,
-            include_merges=rqst.get('levels') != 1)
+            rqst.get('specific_fileids')[
+            include_merges=rqst.get('levels') != 1)
         return make_log_rev_iterator(self.branch, view_revisions,
                                      rqst.get('delta_type'), rqst.get('match'))

 def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,

             '--exclude-common-ancestry requires two different revisions'))
     if direction not in ('reverse', 'forward'):
         raise ValueError(gettext('invalid direction %r') % direction)
-    br_revno, br_rev_id = branch.last_revision_info()
+    br_rev_id = branch.last_revision()
+    if br_rev_id == _mod_revision.NULL_REVISION:

     if (end_rev_id and start_rev_id == end_rev_id
         and (not generate_merge_revisions
              or not _has_merges(branch, end_rev_id))):
         # If a single revision is requested, check we can handle it
         return _generate_one_revision(branch, end_rev_id, br_rev_id,

     if not generate_merge_revisions:

             # If we only want to see linear revisions, we can iterate ...

             # ancestor of the end limit, check it before outputting anything
             if (direction == 'forward'
                 or (start_rev_id and not _is_obvious_ancestor(
                     branch, start_rev_id, end_rev_id))):
                 iter_revs = list(iter_revs)
             if direction == 'forward':
                 iter_revs = reversed(iter_revs)

     initial_revisions = []
     if delayed_graph_generation:

             for rev_id, revno, depth in _linear_view_revisions(
                     branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                 if _has_merges(branch, rev_id):
                     # The end_rev_id can be nested down somewhere. We need an
                     # explicit ancestry check. There is an ambiguity here as we

                     # -- vila 20100319
                     graph = branch.repository.get_graph()
                     if (start_rev_id is not None
                             and not graph.is_ancestor(start_rev_id, end_rev_id)):
                         raise _StartNotLinearAncestor()
                     # Since we collected the revisions so far, we need to
                     # adjust end_rev_id.

             # A merge was never detected so the lower revision limit can't
             # be nested down somewhere
             raise errors.BzrCommandError(gettext('Start revision not found in'
                                                  ' history of end revision.'))

     # We exit the loop above because we encounter a revision with merges, from
     # this revision, we need to switch to _graph_view_revisions.

     # make forward the exact opposite display, but showing the merge revisions
     # indented at the end seems slightly nicer in that case.
     view_revisions = itertools.chain(iter(initial_revisions),
-        _graph_view_revisions(branch, start_rev_id, end_rev_id,
-            rebase_initial_depths=(direction == 'reverse'),
-            exclude_common_ancestry=exclude_common_ancestry))
+        _graph_view_revisions(branch, start_rev_id, end_rev_id,
+            rebase_initial_depths=(
+                direction == 'reverse'),
+            exclude_common_ancestry=exclude_common_ancestry))
     return view_revisions

             # both on mainline
             return start_dotted[0] <= end_dotted[0]
         elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
               start_dotted[0:1] == end_dotted[0:1]):
             # both on same development line
             return start_dotted[2] <= end_dotted[2]

     :raises _StartNotLinearAncestor: if a start_rev_id is specified but
         is not found walking the left-hand history

-    br_revno, br_rev_id = branch.last_revision_info()
     repo = branch.repository
     graph = repo.get_graph()
     if start_rev_id is None and end_rev_id is None:
+        if branch._format.stores_revno() or \
+                config.GlobalStack().get('calculate_revnos'):
+                br_revno, br_rev_id = branch.last_revision_info()
+            except errors.GhostRevisionsHaveNoRevno:
+                br_rev_id = branch.last_revision()

+            br_rev_id = branch.last_revision()

         graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
                                                   (_mod_revision.NULL_REVISION,))

                 revision_id = next(graph_iter)

                 yield e.revision_id, None, None

-                yield revision_id, str(cur_revno), 0
+            except StopIteration:
+                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
+                if cur_revno is not None:

+        br_rev_id = branch.last_revision()
         if end_rev_id is None:
             end_rev_id = br_rev_id
         found_start = start_rev_id is None
         graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
                                                   (_mod_revision.NULL_REVISION,))

                 revision_id = next(graph_iter)

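With revno calculation now optional, the revno element yielded by this generator may be None rather than a numeric string, so consumers should not assume it is printable. A minimal sketch of defensive consumption, assuming `view_revisions` comes from _linear_view_revisions:

    for rev_id, revno, depth in view_revisions:
        # revno is a string such as '42' or '1.2.3', or None when revnos
        # were not calculated (ghosts, or calculate_revnos disabled).
        label = revno if revno is not None else rev_id.decode('utf-8')
        print('  ' * (depth or 0) + label)
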
     if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
         min_depth = min([d for r, n, d in view_revisions])
         if min_depth != 0:
-            view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
+            view_revisions = [(r, n, d - min_depth)
+                              for r, n, d in view_revisions]
     return view_revisions

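A worked example of the rebasing above, assuming the view only contains merged revisions (all depths >= 1):

    view = [(b'rev-a', '1.1.1', 1), (b'rev-b', '1.1.2', 2)]
    min_depth = min(d for r, n, d in view)              # 1
    rebased = [(r, n, d - min_depth) for r, n, d in view]
    # [(b'rev-a', '1.1.1', 0), (b'rev-b', '1.1.2', 1)]
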
 def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                           file_ids=None, direction='reverse'):
     """Create a revision iterator for log.

     :param branch: The branch being logged.

         # It would be nicer if log adapters were first class objects
         # with custom parameters. This will do for now. IGC 20090127
         if adapter == _make_delta_filter:
-            log_rev_iterator = adapter(branch, generate_delta,
-                search, log_rev_iterator, file_ids, direction)
+            log_rev_iterator = adapter(
+                branch, generate_delta, search, log_rev_iterator, file_ids,

-            log_rev_iterator = adapter(branch, generate_delta,
-                search, log_rev_iterator)
+            log_rev_iterator = adapter(
+                branch, generate_delta, search, log_rev_iterator)
     return log_rev_iterator

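The loop above treats each adapter as a callable with the signature shown, consuming and yielding batches of ((rev_id, revno, merge_depth), rev, delta) tuples. A hypothetical sketch of such an adapter (the name is not part of breezy, and registration into the chain is omitted):

    def _make_merge_only_filter(branch, generate_delta, search, log_rev_iterator):
        """Drop non-merge revisions from each batch (illustrative only)."""
        for revs in log_rev_iterator:
            yield [(key, rev, delta) for key, rev, delta in revs
                   if rev is not None and len(rev.parent_ids) > 1]
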
         return log_rev_iterator
-    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
+    # Use lazy_compile so mapping to InvalidPattern error occurs.
+    searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
                 for k, v in match.items()]
     return _filter_re(searchRE, log_rev_iterator)

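The `match` dictionary compiled here maps property names to lists of patterns, as described by the `:param match:` text in the show_log hunk earlier. A small sketch of building such a request; the keys mirror the `strings` dictionary in the next hunk, and '' searches all properties:

    rqst = make_log_request_dict(
        match={
            'message': ['^Fix', 'regression'],   # any pattern may match
            'author': ['jelmer'],
            '': ['CVE-'],                        # '' searches every property
        })
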
 def _match_filter(searchRE, rev):

         'message': (rev.message,),
         'committer': (rev.committer,),
         'author': (rev.get_apparent_authors()),
         'bugs': list(rev.iter_bugs())

     strings[''] = [item for inner_list in strings.values()
                    for item in inner_list]
-    for (k, v) in searchRE:
+    for k, v in searchRE:
         if k in strings and not _match_any_filter(strings[k], v):

 def _match_any_filter(strings, res):
-    return any(re.search(s) for re in res for s in strings)
+    return any(r.search(s) for r in res for s in strings)

 def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                        fileids=None, direction='reverse'):
     """Add revision deltas to a log iterator if needed.

     :param branch: The branch being logged.

     if not generate_delta and not fileids:
         return log_rev_iterator
     return _generate_deltas(branch.repository, log_rev_iterator,
                             generate_delta, fileids, direction)

 def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,

     """Create deltas for each batch of revisions in log_rev_iterator.

     If we're only generating deltas for the sake of filtering against

       fileids set once their add or remove entry is detected respectively

     if stop_on == 'add':
-        for item in delta.added:
-            if item[1] in fileids:
-                fileids.remove(item[1])
+        for item in delta.added + delta.copied:
+            if item.file_id in fileids:
+                fileids.remove(item.file_id)
     elif stop_on == 'delete':
         for item in delta.removed:
-            if item[1] in fileids:
-                fileids.remove(item[1])
+            if item.file_id in fileids:
+                fileids.remove(item.file_id)

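The change from tuple indexing (item[1]) to attribute access (item.file_id, c.path) reflects delta entries now being objects rather than plain tuples. A minimal sketch of walking a delta in the new style; `b`, `old_revid` and `new_revid` are assumptions:

    old_tree = b.repository.revision_tree(old_revid)
    new_tree = b.repository.revision_tree(new_revid)
    delta = new_tree.changes_from(old_tree)
    for item in delta.added + delta.copied:
        print('added/copied:', item.path[1], item.file_id)
    for item in delta.removed:
        print('removed:', item.path[0], item.file_id)
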
 def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):

     :param branch: The branch containing the revisions.

     :param start_revision: The first revision to be logged.
-        For backwards compatibility this may be a mainline integer revno,
         but for merge revision support a RevisionInfo is expected.

     :param end_revision: The last revision to be logged.

     :return: (start_rev_id, end_rev_id) tuple.

-    branch_revno, branch_rev_id = branch.last_revision_info()
     start_rev_id = None
-    if start_revision is None:
+    if start_revision is not None:
+        if not isinstance(start_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        start_rev_id = start_revision.rev_id
+        start_revno = start_revision.revno
+    if start_revno is None:
         start_revno = 1
-        if isinstance(start_revision, revisionspec.RevisionInfo):
-            start_rev_id = start_revision.rev_id
-            start_revno = start_revision.revno or 1
-            branch.check_real_revno(start_revision)
-            start_revno = start_revision
-            start_rev_id = branch.get_rev_id(start_revno)

     end_rev_id = None
-    if end_revision is None:
-        end_revno = branch_revno
-        if isinstance(end_revision, revisionspec.RevisionInfo):
-            end_rev_id = end_revision.rev_id
-            end_revno = end_revision.revno or branch_revno
-            branch.check_real_revno(end_revision)
-            end_revno = end_revision
-            end_rev_id = branch.get_rev_id(end_revno)
+    if end_revision is not None:
+        if not isinstance(end_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        end_rev_id = end_revision.rev_id
+        end_revno = end_revision.revno

-    if branch_revno != 0:
+    if branch.last_revision() != _mod_revision.NULL_REVISION:
         if (start_rev_id == _mod_revision.NULL_REVISION
-            or end_rev_id == _mod_revision.NULL_REVISION):
-            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
-        if start_revno > end_revno:
-            raise errors.BzrCommandError(gettext("Start revision must be "
-                                                 "older than the end revision."))
+                or end_rev_id == _mod_revision.NULL_REVISION):
+            raise errors.BzrCommandError(
+                gettext('Logging revision 0 is invalid.'))
+        if end_revno is not None and start_revno > end_revno:
+            raise errors.BzrCommandError(
+                gettext("Start revision must be older than the end revision."))
     return (start_rev_id, end_rev_id)

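Since this function now raises TypeError for anything that is not a RevisionInfo, callers have to do the conversion themselves. Two ways to build one, assuming a branch `b`; both classes come from breezy.revisionspec, which this module already imports as `revisionspec`:

    from breezy.revisionspec import RevisionInfo, RevisionSpec

    start = RevisionInfo(b, 10)                                # from a mainline revno
    end = RevisionSpec.from_string('tag:1.0').in_history(b)    # from a revision spec
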
         end_revno = end_revision

     if ((start_rev_id == _mod_revision.NULL_REVISION)
             or (end_rev_id == _mod_revision.NULL_REVISION)):
         raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
     if start_revno > end_revno:
         raise errors.BzrCommandError(gettext("Start revision must be older "
                                              "than the end revision."))

     if end_revno < start_revno:
         return None, None, None, None

     """Reverse revisions by depth.

     Revisions with a different depth are sorted as a group with the previous
-    revision of that depth. There may be no topological justification for this,
+    revision of that depth. There may be no topological justification for this
     but it looks much nicer.

     # Add a fake revision at start so that we can always attach sub revisions

         self.to_file = to_file
         # 'exact' stream used to show diff, it should print content 'as is'
-        # and should not try to decode/encode it to unicode to avoid bug #328007
+        # and should not try to decode/encode it to unicode to avoid bug

         if to_exact_file is not None:
             self.to_exact_file = to_exact_file

-            # XXX: somewhat hacky; this assumes it's a codec writer; it's better
-            # for code that expects to get diffs to pass in the exact file
+            # XXX: somewhat hacky; this assumes it's a codec writer; it's
+            # better for code that expects to get diffs to pass in the exact

             self.to_exact_file = getattr(to_file, 'stream', to_file)
         self.show_ids = show_ids
         self.show_timezone = show_timezone
         if delta_format is None:
             # Ensures backward compatibility
             delta_format = 2 # long format
         self.delta_format = delta_format
         self.levels = levels
         self._show_advice = show_advice

         lines = self._foreign_info_properties(revision)
         for key, handler in properties_handler_registry.iteritems():
-            lines.extend(self._format_properties(handler(revision)))
+                lines.extend(self._format_properties(handler(revision)))
+                trace.log_exception_quietly()
+                trace.print_exception(sys.exc_info(), self.to_file)

     def _foreign_info_properties(self, rev):

                 rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

         # Imported foreign revision revision ids always contain :
-        if not ":" in rev.revision_id:
+        if b":" not in rev.revision_id:

         # Revision was once imported from a foreign repository

     def show_diff(self, to_file, diff, indent):
-        for l in diff.rstrip().split('\n'):
-            to_file.write(indent + '%s\n' % (l,))
+        encoding = get_terminal_encoding()
+        for l in diff.rstrip().split(b'\n'):
+            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')

     # Separator between revisions in long format

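show_diff now treats the stored diff as bytes and decodes it with the terminal encoding rather than writing it raw; the same pattern works for any caller holding a bytes diff. A sketch, with `diff_bytes` assumed:

    import sys

    from breezy.osutils import get_terminal_encoding

    encoding = get_terminal_encoding()
    for line in diff_bytes.rstrip().split(b'\n'):
        sys.stdout.write('    ' + line.decode(encoding, 'ignore') + '\n')
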
     def _date_string_original_timezone(self, rev):
         return format_date_with_offset_in_original_timezone(rev.timestamp,

     def log_revision(self, revision):
         """Log a revision, either merged or not."""

         lines = [_LONG_SEP]
         if revision.revno is not None:
             lines.append('revno: %s%s' % (revision.revno,
                                           self.merge_marker(revision)))
         if revision.tags:
-            lines.append('tags: %s' % (', '.join(revision.tags)))
+            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
         if self.show_ids or revision.revno is None:
-            lines.append('revision-id: %s' % (revision.rev.revision_id,))
+            lines.append('revision-id: %s' %
+                         (revision.rev.revision_id.decode('utf-8'),))
         if self.show_ids:
             for parent_id in revision.rev.parent_ids:
-                lines.append('parent: %s' % (parent_id,))
+                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
         lines.extend(self.custom_properties(revision.rev))

         committer = revision.rev.committer

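Revision and parent ids are bytes in breezy, hence the explicit .decode('utf-8') before interpolating them into log text; a one-line illustration with a hypothetical id:

    revision_id = b'jelmer@example.com-20200101000000-abcdef'   # hypothetical id
    line = 'revision-id: %s' % (revision_id.decode('utf-8'),)
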
         to_file = self.to_file

         if revision.tags:
-            tags = ' {%s}' % (', '.join(revision.tags))
+            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
         to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
-            revision.revno or "", self.short_author(revision.rev),
-            format_date(revision.rev.timestamp,
-                        revision.rev.timezone or 0,
-                        self.show_timezone, date_fmt="%Y-%m-%d",
-            tags, self.merge_marker(revision)))
-        self.show_properties(revision.rev, indent+offset)
+            revision.revno or "", self.short_author(
+            format_date(revision.rev.timestamp,
+                        revision.rev.timezone or 0,
+                        self.show_timezone, date_fmt="%Y-%m-%d",
+            tags, self.merge_marker(revision)))
+        self.show_properties(revision.rev, indent + offset)
         if self.show_ids or revision.revno is None:
             to_file.write(indent + offset + 'revision-id:%s\n'
-                          % (revision.rev.revision_id,))
+                          % (revision.rev.revision_id.decode('utf-8'),))
         if not revision.rev.message:
             to_file.write(indent + offset + '(no message)\n')

             # Use the standard status output to display changes
             from breezy.delta import report_delta
             report_delta(to_file, revision.delta,
-                         short_status=self.delta_format==1,
+                         short_status=self.delta_format == 1,
                          show_ids=self.show_ids, indent=indent + offset)
         if revision.diff is not None:
             self.show_diff(self.to_exact_file, revision.diff, ' ')

     def truncate(self, str, max_len):
         if max_len is None or len(str) <= max_len:
-        return str[:max_len-3] + '...'
+        return str[:max_len - 3] + '...'

     def date_string(self, rev):
         return format_date(rev.timestamp, rev.timezone or 0,

     def log_revision(self, revision):
         indent = ' ' * revision.merge_depth
         self.to_file.write(self.log_string(revision.revno, revision.rev,
                                            self._max_chars, revision.tags, indent))
         self.to_file.write('\n')

     def log_string(self, revno, rev, max_chars, tags=None, prefix=''):

             # show revno only when is not None
             out.append("%s:" % revno)
         if max_chars is not None:
-            out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
+            out.append(self.truncate(
+                self.short_author(rev), (max_chars + 3) // 4))

             out.append(self.short_author(rev))
         out.append(self.date_string(rev))
         if len(rev.parent_ids) > 1:
             out.append('[merge]')

-            tag_str = '{%s}' % (', '.join(tags))
+            tag_str = '{%s}' % (', '.join(sorted(tags)))
             out.append(tag_str)
         out.append(rev.get_summary())
         return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)

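The (max_chars + 3) // 4 change matters on Python 3, where / produces a float and would break the slice inside truncate(); for example:

    max_chars = 80
    print((max_chars + 3) / 4)     # 20.75 on Python 3 -> not usable as a slice length
    print((max_chars + 3) // 4)    # 20
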
         if revision.delta is not None and revision.delta.has_changed():
             for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
+                if c.path[0] is None:

                 to_file.write('\t* %s:\n' % (path,))
-            for c in revision.delta.renamed:
-                oldpath, newpath = c[:2]
+            for c in revision.delta.renamed + revision.delta.copied:
                 # For renamed files, show both the old and the new path
-                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
+                to_file.write('\t* %s:\n\t* %s:\n' % (c.path[0], c.path[1]))
             to_file.write('\n')

         if not revision.rev.message:

         return log_formatter_registry.make_formatter(name, *args, **kwargs)
     except KeyError:
-        raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)
+        raise errors.BzrCommandError(
+            gettext("unknown log formatter: %r") % name)

 def author_list_all(rev):

     for i in range(max(len(new_rh), len(old_rh))):
         if (len(new_rh) <= i
                 or len(old_rh) <= i
                 or new_rh[i] != old_rh[i]):

     if base_idx is None:
         to_file.write('Nothing seems to have changed\n')

-    ## TODO: It might be nice to do something like show_log
-    ## and show the merged entries. But since this is the
-    ## removed revisions, it shouldn't be as important
+    # TODO: It might be nice to do something like show_log
+    # and show the merged entries. But since this is the
+    # removed revisions, it shouldn't be as important
     if base_idx < len(old_rh):
-        to_file.write('*'*60)
+        to_file.write('*' * 60)
         to_file.write('\nRemoved Revisions:\n')
         for i in range(base_idx, len(old_rh)):
             rev = branch.repository.get_revision(old_rh[i])
-            lr = LogRevision(rev, i+1, 0, None)
+            lr = LogRevision(rev, i + 1, 0, None)
             lf.log_revision(lr)
-        to_file.write('*'*60)
+        to_file.write('*' * 60)
         to_file.write('\n\n')
     if base_idx < len(new_rh):
         to_file.write('Added Revisions:\n')
         show_log(branch,

                  direction='forward',
-                 start_revision=base_idx+1,
+                 start_revision=base_idx + 1,
                  end_revision=len(new_rh),

     log_format = log_formatter_registry.get_default(branch)
     lf = log_format(show_ids=False, to_file=output, show_timezone='original')
     if old_history != []:
-        output.write('*'*60)
+        output.write('*' * 60)
         output.write('\nRemoved Revisions:\n')
         show_flat_log(branch.repository, old_history, old_revno, lf)
-        output.write('*'*60)
+        output.write('*' * 60)
         output.write('\n\n')
     if new_history != []:
         output.write('Added Revisions:\n')
         start_revno = new_revno - len(new_history) + 1
-        show_log(branch, lf, None, verbose=False, direction='forward',
-                 start_revision=start_revno,)
+        show_log(branch, lf, verbose=False, direction='forward',
+                 start_revision=start_revno)

 def show_flat_log(repository, history, last_revno, lf):

     :param last_revno: The revno of the last revision_id in the history.
     :param lf: The log formatter to use.

-    start_revno = last_revno - len(history) + 1
     revisions = repository.get_revisions(history)
     for i, rev in enumerate(revisions):
         lr = LogRevision(rev, i + last_revno, 0, None)
         lf.log_revision(lr)

-def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
+def _get_info_for_log_files(revisionspec_list, file_list, exit_stack):
     """Find file-ids and kinds given a list of files and a revision range.

     We search for files at the end of the range. If not found there,

     :param file_list: the list of paths given on the command line;
         the first of these can be a branch location or a file path,
         the remainder must be file paths
-    :param add_cleanup: When the branch returned is read locked,
-        an unlock call will be queued to the cleanup.
+    :param exit_stack: When the branch returned is read locked,
+        an unlock call will be queued to the exit stack.
     :return: (branch, info_list, start_rev_info, end_rev_info) where
         info_list is a list of (relative_path, file_id, kind) tuples where
         kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.

     from breezy.builtins import _get_revision_range
     tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
-    add_cleanup(b.lock_read().unlock)
+    exit_stack.enter_context(b.lock_read())
     # XXX: It's damn messy converting a list of paths to relative paths when
     # those paths might be deleted ones, they might be on a case-insensitive
     # filesystem and/or they might be in silly locations (like another branch).

     relpaths = [path] + file_list[1:]

     start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,

     if relpaths in ([], [u'']):
         return b, [], start_rev_info, end_rev_info
     if start_rev_info is None and end_rev_info is None:

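The new exit_stack parameter is used via enter_context(), i.e. it follows the contextlib.ExitStack protocol, so a caller that used to pass add_cleanup can now do something like the following sketch (argument values are assumptions):

    from contextlib import ExitStack

    with ExitStack() as exit_stack:
        b, info_list, start_rev_info, end_rev_info = _get_info_for_log_files(
            revisionspec_list, file_list, exit_stack)
        # ... use b while it is still read-locked ...
    # leaving the with-block runs the queued unlock
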
 properties_handler_registry = registry.Registry()

 # Use the properties handlers to print out bug information if available

 def _bugs_properties_handler(revision):
-    if 'bugs' in revision.properties:
-        bug_lines = revision.properties['bugs'].split('\n')
-        bug_rows = [line.split(' ', 1) for line in bug_lines]
-        fixed_bug_urls = [row[0] for row in bug_rows if
-                          len(row) > 1 and row[1] == 'fixed']
-        return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
-                ' '.join(fixed_bug_urls)}
+    related_bug_urls = []
+    for bug_url, status in revision.iter_bugs():
+        if status == 'fixed':
+            fixed_bug_urls.append(bug_url)
+        elif status == 'related':
+            related_bug_urls.append(bug_url)

+        text = ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))
+        ret[text] = ' '.join(fixed_bug_urls)
+    if related_bug_urls:
+        text = ngettext('related bug', 'related bugs',
+                        len(related_bug_urls))
+        ret[text] = ' '.join(related_bug_urls)

 properties_handler_registry.register('bugs_properties_handler',
                                      _bugs_properties_handler)

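The removed lines show the on-disk format that the new iter_bugs()-based code replaces: the 'bugs' revision property is a newline-separated list of "<url> <status>" entries. A sketch of parsing it directly, equivalent to what the old handler did (URLs here are made up):

    bugs_prop = ('https://bugs.example.com/1234 fixed\n'
                 'https://bugs.example.com/5678 related')
    fixed = [url for url, _, status in
             (line.partition(' ') for line in bugs_prop.split('\n'))
             if status == 'fixed']
    # ['https://bugs.example.com/1234']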