 all the changes since the previous revision that touched hello.c.
-from __future__ import absolute_import
-from cStringIO import StringIO
-from itertools import (
 from warnings import (
-from bzrlib.lazy_import import lazy_import
+from .lazy_import import lazy_import
 lazy_import(globals(), """
     repository as _mod_repository,
     revision as _mod_revision,
+from breezy.i18n import gettext, ngettext
-from bzrlib.osutils import (
+from .osutils import (
     format_date_with_offset_in_original_timezone,
+    get_diff_header_encoding,
     get_terminal_encoding,
-from bzrlib.symbol_versioning import (
-def find_touching_revisions(branch, file_id):
+from .tree import find_previous_path
+def find_touching_revisions(repository, last_revision, last_tree, last_path):
     """Yield a description of revisions which affect the file_id.
     Each returned element is (revno, revision_id, description)
     TODO: Perhaps some way to limit this to only particular revisions,
     or to traverse a non-mainline set of revisions?
-    for revision_id in branch.revision_history():
-        this_inv = branch.repository.get_inventory(revision_id)
-        if file_id in this_inv:
-            this_ie = this_inv[file_id]
-            this_path = this_inv.id2path(file_id)
-            this_ie = this_path = None
+    last_verifier = last_tree.get_file_verifier(last_path)
+    graph = repository.get_graph()
+    history = list(graph.iter_lefthand_ancestry(last_revision, []))
+    for revision_id in history:
+        this_tree = repository.revision_tree(revision_id)
+        this_path = find_previous_path(last_tree, this_tree, last_path)
         # now we know how it was last time, and how it is in this revision.
         # are those two states effectively the same or not?
-        if not this_ie and not last_ie:
-            # not present in either
-        elif this_ie and not last_ie:
-            yield revno, revision_id, "added " + this_path
-        elif not this_ie and last_ie:
-            yield revno, revision_id, "deleted " + last_path
+        if this_path is not None and last_path is None:
+            yield revno, revision_id, "deleted " + this_path
+            this_verifier = this_tree.get_file_verifier(this_path)
+        elif this_path is None and last_path is not None:
+            yield revno, revision_id, "added " + last_path
         elif this_path != last_path:
-            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
-        elif (this_ie.text_size != last_ie.text_size
-              or this_ie.text_sha1 != last_ie.text_sha1):
-            yield revno, revision_id, "modified " + this_path
+            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
+            this_verifier = this_tree.get_file_verifier(this_path)
+            this_verifier = this_tree.get_file_verifier(this_path)
+            if (this_verifier != last_verifier):
+                yield revno, revision_id, "modified " + this_path
+        last_verifier = this_verifier
         last_path = this_path
-def _enumerate_history(branch):
-    for rev_id in branch.revision_history():
-        rh.append((revno, rev_id))
+        last_tree = this_tree
+        if last_path is None:
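
A minimal usage sketch for the rewritten generator, assuming a breezy branch opened locally and the signature shown above; the 'hello.c' path and variable names are illustrative only:

    # Sketch only: walk the revisions that touched one file, newest first.
    from breezy.branch import Branch

    branch = Branch.open('.')
    with branch.lock_read():
        last_revision = branch.last_revision()
        last_tree = branch.repository.revision_tree(last_revision)
        for revno, revision_id, description in find_touching_revisions(
                branch.repository, last_revision, last_tree, 'hello.c'):
            print(revno, description)
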
 def show_log(branch,
+    if isinstance(start_revision, int):
+            start_revision = revisionspec.RevisionInfo(branch, start_revision)
+        except errors.NoSuchRevision:
+            raise errors.InvalidRevisionNumber(start_revision)
+    if isinstance(end_revision, int):
+            end_revision = revisionspec.RevisionInfo(branch, end_revision)
+        except errors.NoSuchRevision:
+            raise errors.InvalidRevisionNumber(end_revision)
+    if end_revision is not None and end_revision.revno == 0:
+        raise errors.InvalidRevisionNumber(end_revision.revno)
     # Build the request and execute it
     rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
         start_revision=start_revision, end_revision=end_revision,
 def make_log_request_dict(direction='reverse', specific_fileids=None,
                           start_revision=None, end_revision=None, limit=None,
-                          message_search=None, levels=1, generate_tags=True,
+                          message_search=None, levels=None, generate_tags=True,
                           diff_type=None, _match_using_deltas=True,
-                          exclude_common_ancestry=False,
+                          exclude_common_ancestry=False, match=None,
+                          signature=False, omit_merges=False,
     """Convenience function for making a logging request dictionary.
     :param _match_using_deltas: a private parameter controlling the
       algorithm used for matching specific_fileids. This parameter
-      may be removed in the future so bzrlib client code should NOT
+      may be removed in the future so breezy client code should NOT
     :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
       range operator or as a graph difference.
+    :param signature: show digital signature information
+    :param match: Dictionary of list of search strings to use when filtering
+      revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
+      the empty string to match any of the preceding properties.
+    :param omit_merges: If True, commits with more than one parent are
+    # Take care of old style message_search parameter
+            if 'message' in match:
+                match['message'].append(message_search)
+                match['message'] = [message_search]
+        match= {'message': [message_search]}
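
A short sketch of how a caller might use the new match parameter in place of message_search; the patterns shown are illustrative, the keys come from the docstring above:

    # Sketch only: any pattern may hit within a property, every listed
    # property must be satisfied.
    rqst = make_log_request_dict(
        direction='reverse',
        levels=0,                     # all merge levels
        match={'message': ['fix', 'typo'],
               'author': ['jrandom@example.com']},
        omit_merges=False)
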
         'direction': direction,
         'specific_fileids': specific_fileids,
         'start_revision': start_revision,
         'end_revision': end_revision,
-        'message_search': message_search,
         'levels': levels,
         'generate_tags': generate_tags,
         'delta_type': delta_type,
         'diff_type': diff_type,
         'exclude_common_ancestry': exclude_common_ancestry,
+        'signature': signature,
+        'omit_merges': omit_merges,
         # Add 'private' attributes for features that may be deprecated
         '_match_using_deltas': _match_using_deltas,
 def _apply_log_request_defaults(rqst):
     """Apply default values to a request dictionary."""
-    result = _DEFAULT_REQUEST_PARAMS
+    result = _DEFAULT_REQUEST_PARAMS.copy()
     result.update(rqst)
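
The switch to _DEFAULT_REQUEST_PARAMS.copy() avoids mutating the shared module-level defaults; a standalone sketch of the failure mode the old line allowed:

    # Sketch only: aliasing the defaults lets one request leak into the next.
    defaults = {'levels': 1, 'delta_type': None}

    def apply_defaults_badly(rqst):
        result = defaults            # same dict object every call
        result.update(rqst)
        return result

    def apply_defaults_correctly(rqst):
        result = defaults.copy()     # fresh dict per call
        result.update(rqst)
        return result
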
+def format_signature_validity(rev_id, branch):
+    """get the signature validity
+    :param rev_id: revision id to validate
+    :param branch: branch of revision
+    :return: human readable string to print to log
+    from breezy import gpg
+    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
+    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
+    if result[0] == gpg.SIGNATURE_VALID:
+        return u"valid signature from {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_KEY_MISSING:
+        return "unknown key {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_NOT_VALID:
+        return "invalid signature!"
+    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
+        return "no signature"
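
A sketch of how the generator below wires this helper in when the request asks for signatures; it mirrors the show_signature handling further down in this diff:

    # Sketch only: only compute validity when the formatter supports it.
    if show_signature:
        signature = format_signature_validity(rev_id, self.branch)
    else:
        signature = None
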
 class LogGenerator(object):
     """A generator of log revisions."""
         if not isinstance(lf, LogFormatter):
             warn("not a LogFormatter instance: %r" % lf)
-        self.branch.lock_read()
+        with self.branch.lock_read():
             if getattr(lf, 'begin_log', None):
             self._show_body(lf)
             if getattr(lf, 'end_log', None):
     def _show_body(self, lf):
         """Show the main log output.
         # Tweak the LogRequest based on what the LogFormatter can handle.
         # (There's no point generating stuff if the formatter can't display it.)
-        rqst['levels'] = lf.get_levels()
+        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
+            # user didn't specify levels, use whatever the LF can handle:
+            rqst['levels'] = lf.get_levels()
         if not getattr(lf, 'supports_tags', False):
             rqst['generate_tags'] = False
         if not getattr(lf, 'supports_delta', False):
             rqst['delta_type'] = None
         if not getattr(lf, 'supports_diff', False):
             rqst['diff_type'] = None
+        if not getattr(lf, 'supports_signatures', False):
+            rqst['signature'] = False
         # Find and print the interesting revisions
         generator = self._generator_factory(self.branch, rqst)
-        for lr in generator.iter_log_revisions():
+            for lr in generator.iter_log_revisions():
+        except errors.GhostRevisionUnusableHere:
+            raise errors.BzrCommandError(
+                gettext('Further revision history missing.'))
     def _generator_factory(self, branch, rqst):
         """Make the LogGenerator object to use.
         Subclasses may wish to override this.
         return _DefaultLogGenerator(branch, rqst)
         levels = rqst.get('levels')
         limit = rqst.get('limit')
         diff_type = rqst.get('diff_type')
+        show_signature = rqst.get('signature')
+        omit_merges = rqst.get('omit_merges')
         revision_iterator = self._create_log_revision_iterator()
         for revs in revision_iterator:
             for (rev_id, revno, merge_depth), rev, delta in revs:
                 # 0 levels means show everything; merge_depth counts from 0
-                if levels != 0 and merge_depth >= levels:
+                if levels != 0 and merge_depth is not None and merge_depth >= levels:
+                if omit_merges and len(rev.parent_ids) > 1:
+                    raise errors.GhostRevisionUnusableHere(rev_id)
                 if diff_type is None:
                     diff = self._format_diff(rev, rev_id, diff_type)
+                    signature = format_signature_validity(rev_id, self.branch)
                 yield LogRevision(rev, revno, merge_depth, delta,
-                                  self.rev_tag_dict.get(rev_id), diff)
+                                  self.rev_tag_dict.get(rev_id), diff, signature)
                 if log_count >= limit:
                 rqst.get('specific_fileids')[0], view_revisions,
                 include_merges=rqst.get('levels') != 1)
         return make_log_rev_iterator(self.branch, view_revisions,
-            rqst.get('delta_type'), rqst.get('message_search'))
+            rqst.get('delta_type'), rqst.get('match'))
 def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
         a list of the same tuples.
     if (exclude_common_ancestry and start_rev_id == end_rev_id):
-        raise errors.BzrCommandError(
-            '--exclude-common-ancestry requires two different revisions')
+        raise errors.BzrCommandError(gettext(
+            '--exclude-common-ancestry requires two different revisions'))
     if direction not in ('reverse', 'forward'):
-        raise ValueError('invalid direction %r' % direction)
-    br_revno, br_rev_id = branch.last_revision_info()
+        raise ValueError(gettext('invalid direction %r') % direction)
+    br_rev_id = branch.last_revision()
+    if br_rev_id == _mod_revision.NULL_REVISION:
     if (end_rev_id and start_rev_id == end_rev_id
         and (not generate_merge_revisions
              or not _has_merges(branch, end_rev_id))):
         # If a single revision is requested, check we can handle it
-        iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
-    elif not generate_merge_revisions:
-        # If we only want to see linear revisions, we can iterate ...
-        iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
-        if direction == 'forward':
-            iter_revs = reversed(iter_revs)
-        iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
-                                            direction, delayed_graph_generation,
-                                            exclude_common_ancestry)
-    if direction == 'forward':
-        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
+        return _generate_one_revision(branch, end_rev_id, br_rev_id,
+    if not generate_merge_revisions:
+            # If we only want to see linear revisions, we can iterate ...
+            iter_revs = _linear_view_revisions(
+                branch, start_rev_id, end_rev_id,
+                exclude_common_ancestry=exclude_common_ancestry)
+            # If a start limit was given and it's not obviously an
+            # ancestor of the end limit, check it before outputting anything
+            if (direction == 'forward'
+                or (start_rev_id and not _is_obvious_ancestor(
+                    branch, start_rev_id, end_rev_id))):
+                iter_revs = list(iter_revs)
+            if direction == 'forward':
+                iter_revs = reversed(iter_revs)
+        except _StartNotLinearAncestor:
+            # Switch to the slower implementation that may be able to find a
+            # non-obvious ancestor out of the left-hand history.
+    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
+                                        direction, delayed_graph_generation,
+                                        exclude_common_ancestry)
+    if direction == 'forward':
+        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
         return [(br_rev_id, br_revno, 0)]
-    revno = branch.revision_id_to_dotted_revno(rev_id)
-    revno_str = '.'.join(str(n) for n in revno)
+    revno_str = _compute_revno_str(branch, rev_id)
     return [(rev_id, revno_str, 0)]
-def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
-    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
-    # If a start limit was given and it's not obviously an
-    # ancestor of the end limit, check it before outputting anything
-    if direction == 'forward' or (start_rev_id
-        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
-            result = list(result)
-        except _StartNotLinearAncestor:
-            raise errors.BzrCommandError('Start revision not found in'
-                ' left-hand history of end revision.')
 def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                             delayed_graph_generation,
                             exclude_common_ancestry=False):
     if delayed_graph_generation:
             for rev_id, revno, depth in _linear_view_revisions(
-                branch, start_rev_id, end_rev_id):
+                branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                 if _has_merges(branch, rev_id):
                     # The end_rev_id can be nested down somewhere. We need an
                     # explicit ancestry check. There is an ambiguity here as we
         except _StartNotLinearAncestor:
             # A merge was never detected so the lower revision limit can't
             # be nested down somewhere
-            raise errors.BzrCommandError('Start revision not found in'
-                ' history of end revision.')
+            raise errors.BzrCommandError(gettext('Start revision not found in'
+                ' history of end revision.'))
     # We exit the loop above because we encounter a revision with merges, from
     # this revision, we need to switch to _graph_view_revisions.
     # shown naturally, i.e. just like it is for linear logging. We can easily
     # make forward the exact opposite display, but showing the merge revisions
     # indented at the end seems slightly nicer in that case.
-    view_revisions = chain(iter(initial_revisions),
+    view_revisions = itertools.chain(iter(initial_revisions),
         _graph_view_revisions(branch, start_rev_id, end_rev_id,
             rebase_initial_depths=(direction == 'reverse'),
             exclude_common_ancestry=exclude_common_ancestry))
     return len(parents) > 1
+def _compute_revno_str(branch, rev_id):
+    """Compute the revno string from a rev_id.
+    :return: The revno string, or None if the revision is not in the supplied
+        revno = branch.revision_id_to_dotted_revno(rev_id)
+    except errors.NoSuchRevision:
+        # The revision must be outside of this branch
+    return '.'.join(str(n) for n in revno)
 def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
     """Is start_rev_id an obvious ancestor of end_rev_id?"""
     if start_rev_id and end_rev_id:
-        start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
-        end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
+            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
+            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
+        except errors.NoSuchRevision:
+            # one or both is not in the branch; not obvious
         if len(start_dotted) == 1 and len(end_dotted) == 1:
             # both on mainline
             return start_dotted[0] <= end_dotted[0]
-def _linear_view_revisions(branch, start_rev_id, end_rev_id):
+def _linear_view_revisions(branch, start_rev_id, end_rev_id,
+                           exclude_common_ancestry=False):
     """Calculate a sequence of revisions to view, newest to oldest.
     :param start_rev_id: the lower revision-id
     :param end_rev_id: the upper revision-id
+    :param exclude_common_ancestry: Whether the start_rev_id should be part of
+        the iterated revisions.
     :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
+        dotted_revno will be None for ghosts
     :raises _StartNotLinearAncestor: if a start_rev_id is specified but
-        is not found walking the left-hand history
+        is not found walking the left-hand history
-    br_revno, br_rev_id = branch.last_revision_info()
     repo = branch.repository
+    graph = repo.get_graph()
     if start_rev_id is None and end_rev_id is None:
-        for revision_id in repo.iter_reverse_revision_history(br_rev_id):
-            yield revision_id, str(cur_revno), 0
+            br_revno, br_rev_id = branch.last_revision_info()
+        except errors.GhostRevisionsHaveNoRevno:
+            br_rev_id = branch.last_revision()
+        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
+            (_mod_revision.NULL_REVISION,))
+                revision_id = next(graph_iter)
+            except errors.RevisionNotPresent as e:
+                yield e.revision_id, None, None
+            except StopIteration:
+                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
+                if cur_revno is not None:
+        br_rev_id = branch.last_revision()
         if end_rev_id is None:
             end_rev_id = br_rev_id
         found_start = start_rev_id is None
-        for revision_id in repo.iter_reverse_revision_history(end_rev_id):
-            revno = branch.revision_id_to_dotted_revno(revision_id)
-            revno_str = '.'.join(str(n) for n in revno)
-            if not found_start and revision_id == start_rev_id:
-                yield revision_id, revno_str, 0
+        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
+            (_mod_revision.NULL_REVISION,))
+                revision_id = next(graph_iter)
+            except StopIteration:
+            except errors.RevisionNotPresent as e:
+                yield e.revision_id, None, None
-                yield revision_id, revno_str, 0
-            raise _StartNotLinearAncestor()
+                revno_str = _compute_revno_str(branch, revision_id)
+                if not found_start and revision_id == start_rev_id:
+                    if not exclude_common_ancestry:
+                        yield revision_id, revno_str, 0
+                yield revision_id, revno_str, 0
+            raise _StartNotLinearAncestor()
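
Consumers of _linear_view_revisions now have to tolerate a None dotted revno for ghost ancestors; a small, assumed-shape sketch:

    # Sketch only: revno_str is None when the left-hand ancestor is a ghost.
    for revision_id, revno_str, merge_depth in _linear_view_revisions(
            branch, None, None):
        label = revno_str if revno_str is not None else '?'
        print(label, revision_id.decode('utf-8'))
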
 def _graph_view_revisions(branch, start_rev_id, end_rev_id,
         yield rev_id, '.'.join(map(str, revno)), merge_depth
-@deprecated_function(deprecated_in((2, 2, 0)))
-def calculate_view_revisions(branch, start_revision, end_revision, direction,
-                             specific_fileid, generate_merge_revisions):
-    """Calculate the revisions to view.
-    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
-        a list of the same tuples.
-    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
-    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
-        direction, generate_merge_revisions or specific_fileid))
-        view_revisions = _filter_revisions_touching_file_id(branch,
-            specific_fileid, view_revisions,
-            include_merges=generate_merge_revisions)
-    return _rebase_merge_depth(view_revisions)
 def _rebase_merge_depth(view_revisions):
     """Adjust depths upwards so the top level is 0."""
     # If either the first or last revision have a merge_depth of 0, we're done
     if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
-        min_depth = min([d for r,n,d in view_revisions])
+        min_depth = min([d for r, n, d in view_revisions])
         if min_depth != 0:
-            view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
+            view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
     return view_revisions
     # Convert view_revisions into (view, None, None) groups to fit with
     # the standard interface here.
-    if type(view_revisions) == list:
+    if isinstance(view_revisions, list):
         # A single batch conversion is faster than many incremental ones.
         # As we have all the data, do a batch conversion.
         nones = [None] * len(view_revisions)
-        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
+        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
            for view in view_revisions:
     return log_rev_iterator
-def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
+def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
     """Create a filtered iterator of log_rev_iterator matching on a regex.
     :param branch: The branch being logged.
     :param generate_delta: Whether to generate a delta for each revision.
-    :param search: A user text search string.
+    :param match: A dictionary with properties as keys and lists of strings
+        as values. To match, a revision may match any of the supplied strings
+        within a single property but must match at least one string for each
     :param log_rev_iterator: An input iterator containing all revisions that
         could be displayed, in lists.
     :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
         return log_rev_iterator
-    searchRE = re_compile_checked(search, re.IGNORECASE,
-            'log message filter')
-    return _filter_message_re(searchRE, log_rev_iterator)
-def _filter_message_re(searchRE, log_rev_iterator):
+    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
+                for k, v in match.items()]
+    return _filter_re(searchRE, log_rev_iterator)
+def _filter_re(searchRE, log_rev_iterator):
     for revs in log_rev_iterator:
-        for (rev_id, revno, merge_depth), rev, delta in revs:
-            if searchRE.search(rev.message):
-                new_revs.append(((rev_id, revno, merge_depth), rev, delta))
+        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
+def _match_filter(searchRE, rev):
+        'message': (rev.message,),
+        'committer': (rev.committer,),
+        'author': (rev.get_apparent_authors()),
+        'bugs': list(rev.iter_bugs())
+    strings[''] = [item for inner_list in strings.values()
+                   for item in inner_list]
+    for (k, v) in searchRE:
+        if k in strings and not _match_any_filter(strings[k], v):
+def _match_any_filter(strings, res):
+    return any(re.search(s) for re in res for s in strings)
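
A standalone illustration of the matching rule implemented by _match_filter and _match_any_filter: within one property any pattern may match, but every property named in the dictionary must be satisfied (names here are illustrative):

    # Sketch only, independent of the helpers above.
    import re

    def matches(match, properties):
        compiled = [(k, [re.compile(p, re.IGNORECASE) for p in pats])
                    for k, pats in match.items()]
        return all(any(p.search(value)
                       for p in pats for value in properties.get(k, ()))
                   for k, pats in compiled)

    print(matches({'message': ['fix'], 'author': ['jrandom']},
                  {'message': ['Fix the frobnicator'],
                   'author': ['jrandom@example.com']}))
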
 def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                        fileids=None, direction='reverse'):
         if delta_type == 'full' and not check_fileids:
             deltas = repository.get_deltas_for_revisions(revisions)
-            for rev, delta in izip(revs, deltas):
+            for rev, delta in zip(revs, deltas):
                 new_revs.append((rev[0], rev[1], delta))
             deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
-            for rev, delta in izip(revs, deltas):
+            for rev, delta in zip(revs, deltas):
                 if check_fileids:
                     if delta is None or not delta.has_changed():
     for revs in log_rev_iterator:
         # r = revision_id, n = revno, d = merge depth
         revision_ids = [view[0] for view, _, _ in revs]
-        revisions = repository.get_revisions(revision_ids)
-        revs = [(rev[0], revision, rev[2]) for rev, revision in
-                izip(revs, revisions)]
+        revisions = dict(repository.iter_revisions(revision_ids))
+        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
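
The dict built from iter_revisions() decouples lookup from iteration order, which the old zip-based pairing relied on; a sketch of the idea, assuming iter_revisions yields (revision_id, revision) pairs as used above:

    # Sketch only: index by revision id instead of relying on result order.
    revisions = dict(repository.iter_revisions(revision_ids))
    ordered = [revisions[view[0]] for view, _, _ in revs]
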
 def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
     :return: (start_rev_id, end_rev_id) tuple.
-    branch_revno, branch_rev_id = branch.last_revision_info()
     start_rev_id = None
-    if start_revision is None:
+    if start_revision is not None:
+        if not isinstance(start_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        start_rev_id = start_revision.rev_id
+        start_revno = start_revision.revno
+        if start_revno is None:
-        if isinstance(start_revision, revisionspec.RevisionInfo):
-            start_rev_id = start_revision.rev_id
-            start_revno = start_revision.revno or 1
-            branch.check_real_revno(start_revision)
-            start_revno = start_revision
-            start_rev_id = branch.get_rev_id(start_revno)
     end_rev_id = None
-    if end_revision is None:
-        end_revno = branch_revno
-        if isinstance(end_revision, revisionspec.RevisionInfo):
-            end_rev_id = end_revision.rev_id
-            end_revno = end_revision.revno or branch_revno
-            branch.check_real_revno(end_revision)
-            end_revno = end_revision
-            end_rev_id = branch.get_rev_id(end_revno)
+    if end_revision is not None:
+        if not isinstance(end_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        end_rev_id = end_revision.rev_id
+        end_revno = end_revision.revno
+        if end_revno is None:
+                end_revno = branch.revno()
+            except errors.GhostRevisionsHaveNoRevno:
-    if branch_revno != 0:
+    if branch.last_revision() != _mod_revision.NULL_REVISION:
         if (start_rev_id == _mod_revision.NULL_REVISION
             or end_rev_id == _mod_revision.NULL_REVISION):
-            raise errors.BzrCommandError('Logging revision 0 is invalid.')
-        if start_revno > end_revno:
-            raise errors.BzrCommandError("Start revision must be older than "
-                                         "the end revision.")
+            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
+        if end_revno is not None and start_revno > end_revno:
+            raise errors.BzrCommandError(gettext("Start revision must be "
+                                         "older than the end revision."))
     return (start_rev_id, end_rev_id)
     if ((start_rev_id == _mod_revision.NULL_REVISION)
         or (end_rev_id == _mod_revision.NULL_REVISION)):
-        raise errors.BzrCommandError('Logging revision 0 is invalid.')
+        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
     if start_revno > end_revno:
-        raise errors.BzrCommandError("Start revision must be older than "
-                                     "the end revision.")
+        raise errors.BzrCommandError(gettext("Start revision must be older "
+                                     "than the end revision."))
     if end_revno < start_revno:
         return None, None, None, None
     cur_revno = branch_revno
     mainline_revs = []
-    for revision_id in branch.repository.iter_reverse_revision_history(
-                        branch_last_revision):
+    graph = branch.repository.get_graph()
+    for revision_id in graph.iter_lefthand_ancestry(
+            branch_last_revision, (_mod_revision.NULL_REVISION,)):
         if cur_revno < start_revno:
             # We have gone far enough, but we always add 1 more revision
             rev_nos[revision_id] = cur_revno
     return mainline_revs, rev_nos, start_rev_id, end_rev_id
-@deprecated_function(deprecated_in((2, 2, 0)))
-def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
-    """Filter view_revisions based on revision ranges.
-    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
-        tuples to be filtered.
-    :param start_rev_id: If not NONE specifies the first revision to be logged.
-        If NONE then all revisions up to the end_rev_id are logged.
-    :param end_rev_id: If not NONE specifies the last revision to be logged.
-        If NONE then all revisions up to the end of the log are logged.
-    :return: The filtered view_revisions.
-    if start_rev_id or end_rev_id:
-        revision_ids = [r for r, n, d in view_revisions]
-            start_index = revision_ids.index(start_rev_id)
-        if start_rev_id == end_rev_id:
-            end_index = start_index
-                end_index = revision_ids.index(end_rev_id)
-                end_index = len(view_revisions) - 1
-        # To include the revisions merged into the last revision,
-        # extend end_rev_id down to, but not including, the next rev
-        # with the same or lesser merge_depth
-        end_merge_depth = view_revisions[end_index][2]
-            for index in xrange(end_index+1, len(view_revisions)+1):
-                if view_revisions[index][2] <= end_merge_depth:
-                    end_index = index - 1
-            # if the search falls off the end then log to the end as well
-            end_index = len(view_revisions) - 1
-        view_revisions = view_revisions[start_index:end_index+1]
-    return view_revisions
 def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                        include_merges=True):
     r"""Return the list of revision ids which touch a given file id.
     # indexing layer. We might consider passing in hints as to the known
     # access pattern (sparse/clustered, high success rate/low success
     # rate). This particular access is clustered with a low success rate.
-    get_parent_map = branch.repository.texts.get_parent_map
     modified_text_revisions = set()
     chunk_size = 1000
-    for start in xrange(0, len(text_keys), chunk_size):
+    for start in range(0, len(text_keys), chunk_size):
         next_keys = text_keys[start:start + chunk_size]
         # Only keep the revision_id portion of the key
         modified_text_revisions.update(
-@deprecated_function(deprecated_in((2, 2, 0)))
-def get_view_revisions(mainline_revs, rev_nos, branch, direction,
-                       include_merges=True):
-    """Produce an iterator of revisions to show
-    :return: an iterator of (revision_id, revno, merge_depth)
-    (if there is no revno for a revision, None is supplied)
-    if not include_merges:
-        revision_ids = mainline_revs[1:]
-        if direction == 'reverse':
-            revision_ids.reverse()
-        for revision_id in revision_ids:
-            yield revision_id, str(rev_nos[revision_id]), 0
-    graph = branch.repository.get_graph()
-    # This asks for all mainline revisions, which means we only have to spider
-    # sideways, rather than depth history. That said, its still size-of-history
-    # and should be addressed.
-    # mainline_revisions always includes an extra revision at the beginning, so
-    parent_map = dict(((key, value) for key, value in
-        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
-    # filter out ghosts; merge_sort errors on ghosts.
-    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
-    merge_sorted_revisions = tsort.merge_sort(
-        generate_revno=True)
-    if direction == 'forward':
-        # forward means oldest first.
-        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
-    elif direction != 'reverse':
-        raise ValueError('invalid direction %r' % direction)
-    for (sequence, rev_id, merge_depth, revno, end_of_merge
-         ) in merge_sorted_revisions:
-        yield rev_id, '.'.join(map(str, revno)), merge_depth
 def reverse_by_depth(merge_sorted_revisions, _depth=0):
     """Reverse revisions by depth.
     to indicate which LogRevision attributes it supports:
     - supports_delta must be True if this log formatter supports delta.
-      Otherwise the delta attribute may not be populated. The 'delta_format'
-      attribute describes whether the 'short_status' format (1) or the long
-      one (2) should be used.
+      Otherwise the delta attribute may not be populated. The 'delta_format'
+      attribute describes whether the 'short_status' format (1) or the long
+      one (2) should be used.
     - supports_merge_revisions must be True if this log formatter supports
-      merge revisions. If not, then only mainline revisions will be passed
+      merge revisions. If not, then only mainline revisions will be passed
     - preferred_levels is the number of levels this formatter defaults to.
-      The default value is zero meaning display all levels.
-      This value is only relevant if supports_merge_revisions is True.
+      The default value is zero meaning display all levels.
+      This value is only relevant if supports_merge_revisions is True.
     - supports_tags must be True if this log formatter supports tags.
-      Otherwise the tags attribute may not be populated.
+      Otherwise the tags attribute may not be populated.
     - supports_diff must be True if this log formatter supports diffs.
-      Otherwise the diff attribute may not be populated.
+      Otherwise the diff attribute may not be populated.
+    - supports_signatures must be True if this log formatter supports GPG
     Plugins can register functions to show custom revision properties using
     the properties_handler_registry. The registered function
-    must respect the following interface description:
+    must respect the following interface description::
         def my_show_properties(properties_dict):
             # code that returns a dict {'name':'value'} of the properties
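
A hypothetical plugin handler following the interface sketched above; the registration call mirrors the bugs handler registered at the bottom of this file:

    # Sketch only: 'branch-nick' is a standard revision property.
    def my_show_properties(properties_dict):
        return {'nick': properties_dict.get('branch-nick', '')}

    properties_handler_registry.register('my_properties_handler',
                                         my_show_properties)
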
     def __init__(self, to_file, show_ids=False, show_timezone='original',
                  delta_format=None, levels=None, show_advice=False,
-                 to_exact_file=None):
+                 to_exact_file=None, author_list_handler=None):
         """Create a LogFormatter.
         :param to_file: the file to output to
-        :param to_exact_file: if set, gives an output stream to which
+        :param to_exact_file: if set, gives an output stream to which
             non-Unicode diffs are written.
         :param show_ids: if True, revision-ids are to be displayed
         :param show_timezone: the timezone to use
     def short_author(self, rev):
-        name, address = config.parse_username(rev.get_apparent_authors()[0])
+        return self.authors(rev, 'first', short=True, sep=', ')
+    def authors(self, rev, who, short=False, sep=None):
+        """Generate list of authors, taking --authors option into account.
+        The caller has to specify the name of a author list handler,
+        as provided by the author list registry, using the ``who``
+        argument. That name only sets a default, though: when the
+        user selected a different author list generation using the
+        ``--authors`` command line switch, as represented by the
+        ``author_list_handler`` constructor argument, that value takes
+        :param rev: The revision for which to generate the list of authors.
+        :param who: Name of the default handler.
+        :param short: Whether to shorten names to either name or address.
+        :param sep: What separator to use for automatic concatenation.
+        if self._author_list_handler is not None:
+            # The user did specify --authors, which overrides the default
+            author_list_handler = self._author_list_handler
+            # The user didn't specify --authors, so we use the caller's default
+            author_list_handler = author_list_registry.get(who)
+        names = author_list_handler(rev)
+            for i in range(len(names)):
+                name, address = config.parse_username(names[i])
+            names = sep.join(names)
     def merge_marker(self, revision):
         """Get the merge marker to include in the output or '' if none."""
     def show_diff(self, to_file, diff, indent):
-        for l in diff.rstrip().split('\n'):
-            to_file.write(indent + '%s\n' % (l,))
+        encoding = get_terminal_encoding()
+        for l in diff.rstrip().split(b'\n'):
+            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
     # Separator between revisions in long format
         lines.append('revno: %s%s' % (revision.revno,
                                       self.merge_marker(revision)))
         if revision.tags:
-            lines.append('tags: %s' % (', '.join(revision.tags)))
+            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
+        if self.show_ids or revision.revno is None:
+            lines.append('revision-id: %s' % (revision.rev.revision_id.decode('utf-8'),))
         if self.show_ids:
-            lines.append('revision-id: %s' % (revision.rev.revision_id,))
             for parent_id in revision.rev.parent_ids:
-                lines.append('parent: %s' % (parent_id,))
+                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
         lines.extend(self.custom_properties(revision.rev))
         committer = revision.rev.committer
-        authors = revision.rev.get_apparent_authors()
+        authors = self.authors(revision.rev, 'all')
         if authors != [committer]:
             lines.append('author: %s' % (", ".join(authors),))
         lines.append('committer: %s' % (committer,))
         to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
         if revision.delta is not None:
             # Use the standard status output to display changes
-            from bzrlib.delta import report_delta
-            report_delta(to_file, revision.delta, short_status=False,
+            from breezy.delta import report_delta
+            report_delta(to_file, revision.delta, short_status=False,
                          show_ids=self.show_ids, indent=indent)
         if revision.diff is not None:
             to_file.write(indent + 'diff:\n')
         to_file = self.to_file
         if revision.tags:
-            tags = ' {%s}' % (', '.join(revision.tags))
+            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
         to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
-            revision.revno, self.short_author(revision.rev),
+            revision.revno or "", self.short_author(revision.rev),
             format_date(revision.rev.timestamp,
                         revision.rev.timezone or 0,
                         self.show_timezone, date_fmt="%Y-%m-%d",
                         show_offset=False),
             tags, self.merge_marker(revision)))
         self.show_properties(revision.rev, indent+offset)
+        if self.show_ids or revision.revno is None:
             to_file.write(indent + offset + 'revision-id:%s\n'
-                          % (revision.rev.revision_id,))
+                          % (revision.rev.revision_id.decode('utf-8'),))
         if not revision.rev.message:
             to_file.write(indent + offset + '(no message)\n')
         if revision.delta is not None:
             # Use the standard status output to display changes
-            from bzrlib.delta import report_delta
-            report_delta(to_file, revision.delta,
-                         short_status=self.delta_format==1,
+            from breezy.delta import report_delta
+            report_delta(to_file, revision.delta,
+                         short_status=self.delta_format==1,
                          show_ids=self.show_ids, indent=indent + offset)
         if revision.diff is not None:
             self.show_diff(self.to_exact_file, revision.diff, ' ')
     def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
         """Format log info into one string. Truncate tail of string
-        :param revno: revision number or None.
-            Revision numbers counts from 1.
-        :param rev: revision object
-        :param max_chars: maximum length of resulting string
-        :param tags: list of tags or None
-        :param prefix: string to prefix each line
-        :return: formatted truncated string
+        :param revno: revision number or None.
+            Revision numbers counts from 1.
+        :param rev: revision object
+        :param max_chars: maximum length of resulting string
+        :param tags: list of tags or None
+        :param prefix: string to prefix each line
+        :return: formatted truncated string
         # show revno only when is not None
             out.append("%s:" % revno)
-        out.append(self.truncate(self.short_author(rev), 20))
+        if max_chars is not None:
+            out.append(self.truncate(self.short_author(rev), (max_chars+3)//4))
+            out.append(self.short_author(rev))
         out.append(self.date_string(rev))
         if len(rev.parent_ids) > 1:
             out.append('[merge]')
-            tag_str = '{%s}' % (', '.join(tags))
+            tag_str = '{%s}' % (', '.join(sorted(tags)))
             out.append(tag_str)
         out.append(rev.get_summary())
         return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
                                self.show_timezone,
                                date_fmt='%Y-%m-%d',
                                show_offset=False)
-        committer_str = revision.rev.get_apparent_authors()[0].replace (' <', '  <')
-        to_file.write('%s %s\n\n' % (date_str,committer_str))
+        committer_str = self.authors(revision.rev, 'first', sep=', ')
+        committer_str = committer_str.replace(' <', '  <')
+        to_file.write('%s %s\n\n' % (date_str, committer_str))
         if revision.delta is not None and revision.delta.has_changed():
             for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                 to_file.write('\t* %s:\n' % (path,))
             for c in revision.delta.renamed:
-                oldpath,newpath = c[:2]
+                oldpath, newpath = c[:2]
                 # For renamed files, show both the old and the new path
-                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))
+                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
             to_file.write('\n')
         if not revision.rev.message:
         return self.get(name)(*args, **kwargs)
     def get_default(self, branch):
-        return self.get(branch.get_config().log_format())
+        c = branch.get_config_stack()
+        return self.get(c.get('log_format'))
 log_formatter_registry = LogFormatterRegistry()
 log_formatter_registry.register('short', ShortLogFormatter,
-                                'Moderately short log format')
+                                'Moderately short log format.')
 log_formatter_registry.register('long', LongLogFormatter,
-                                'Detailed log format')
+                                'Detailed log format.')
 log_formatter_registry.register('line', LineLogFormatter,
-                                'Log format with one line per revision')
+                                'Log format with one line per revision.')
 log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
-                                'Format used by GNU ChangeLog files')
+                                'Format used by GNU ChangeLog files.')
 def register_formatter(name, formatter):
     return log_formatter_registry.make_formatter(name, *args, **kwargs)
     except KeyError:
-        raise errors.BzrCommandError("unknown log formatter: %r" % name)
-def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
-    # deprecated; for compatibility
-    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
-    lf.show(revno, rev, delta)
+        raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)
+def author_list_all(rev):
+    return rev.get_apparent_authors()[:]
+def author_list_first(rev):
+    lst = rev.get_apparent_authors()
+def author_list_committer(rev):
+    return [rev.committer]
+author_list_registry = registry.Registry()
+author_list_registry.register('all', author_list_all,
+author_list_registry.register('first', author_list_first,
+author_list_registry.register('committer', author_list_committer,
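
A hypothetical extra handler in the same shape as the three registered above, usable as --authors=emails if a plugin registered it; config.parse_username is the same helper used by short_author:

    # Sketch only.
    def author_list_emails(rev):
        return [config.parse_username(a)[1]
                for a in rev.get_apparent_authors()]

    author_list_registry.register('emails', author_list_emails,
                                  'Email addresses of all authors')
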
 def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
         output.write('Added Revisions:\n')
         start_revno = new_revno - len(new_history) + 1
         show_log(branch, lf, None, verbose=False, direction='forward',
-                 start_revision=start_revno,)
+                 start_revision=start_revno)
 def show_flat_log(repository, history, last_revno, lf):
     :param file_list: the list of paths given on the command line;
         the first of these can be a branch location or a file path,
         the remainder must be file paths
+    :param add_cleanup: When the branch returned is read locked,
+        an unlock call will be queued to the cleanup.
     :return: (branch, info_list, start_rev_info, end_rev_info) where
       info_list is a list of (relative_path, file_id, kind) tuples where
       kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
       branch will be read-locked.
-    from builtins import _get_revision_range, safe_relpath_files
-    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
+    from breezy.builtins import _get_revision_range
+    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
+    add_cleanup(b.lock_read().unlock)
     # XXX: It's damn messy converting a list of paths to relative paths when
     # those paths might be deleted ones, they might be on a case-insensitive
     # filesystem and/or they might be in silly locations (like another branch).
             tree1 = b.repository.revision_tree(rev_id)
             file_id = tree1.path2id(fp)
-            kind = _get_kind_for_file_id(tree1, file_id)
+            kind = _get_kind_for_file_id(tree1, fp, file_id)
             info_list.append((fp, file_id, kind))
     return b, info_list, start_rev_info, end_rev_info
-def _get_kind_for_file_id(tree, file_id):
+def _get_kind_for_file_id(tree, path, file_id):
     """Return the kind of a file-id or None if it doesn't exist."""
     if file_id is not None:
-        return tree.kind(file_id)
+        return tree.kind(path, file_id)
 # Use the properties handlers to print out bug information if available
 def _bugs_properties_handler(revision):
-    if revision.properties.has_key('bugs'):
+    if 'bugs' in revision.properties:
         bug_lines = revision.properties['bugs'].split('\n')
         bug_rows = [line.split(' ', 1) for line in bug_lines]
         fixed_bug_urls = [row[0] for row in bug_rows if
                           len(row) > 1 and row[1] == 'fixed']
         if fixed_bug_urls:
-            return {'fixes bug(s)': ' '.join(fixed_bug_urls)}
+            return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
+                ' '.join(fixed_bug_urls)}
 properties_handler_registry.register('bugs_properties_handler',