 all the changes since the previous revision that touched hello.c.
-from __future__ import absolute_import
-from cStringIO import StringIO
-from itertools import (
 from warnings import (
-from bzrlib.lazy_import import lazy_import
+from .lazy_import import lazy_import
 lazy_import(globals(), """
-    repository as _mod_repository,
     revision as _mod_revision,
+from breezy.i18n import gettext, ngettext
-from bzrlib.osutils import (
+from .osutils import (
     format_date_with_offset_in_original_timezone,
+    get_diff_header_encoding,
     get_terminal_encoding,
-from bzrlib.symbol_versioning import (
-def find_touching_revisions(branch, file_id):
+from .tree import find_previous_path
+def find_touching_revisions(repository, last_revision, last_tree, last_path):
     """Yield a description of revisions which affect the file_id.
     Each returned element is (revno, revision_id, description)
     TODO: Perhaps some way to limit this to only particular revisions,
     or to traverse a non-mainline set of revisions?
-    for revision_id in branch.revision_history():
-        this_inv = branch.repository.get_inventory(revision_id)
-        if file_id in this_inv:
-            this_ie = this_inv[file_id]
-            this_path = this_inv.id2path(file_id)
-            this_ie = this_path = None
+    last_verifier = last_tree.get_file_verifier(last_path)
+    graph = repository.get_graph()
+    history = list(graph.iter_lefthand_ancestry(last_revision, []))
+    for revision_id in history:
+        this_tree = repository.revision_tree(revision_id)
+        this_path = find_previous_path(last_tree, this_tree, last_path)
         # now we know how it was last time, and how it is in this revision.
         # are those two states effectively the same or not?
-        if not this_ie and not last_ie:
-            # not present in either
-        elif this_ie and not last_ie:
-            yield revno, revision_id, "added " + this_path
-        elif not this_ie and last_ie:
-            yield revno, revision_id, "deleted " + last_path
+        if this_path is not None and last_path is None:
+            yield revno, revision_id, "deleted " + this_path
+            this_verifier = this_tree.get_file_verifier(this_path)
+        elif this_path is None and last_path is not None:
+            yield revno, revision_id, "added " + last_path
         elif this_path != last_path:
-            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
-        elif (this_ie.text_size != last_ie.text_size
-              or this_ie.text_sha1 != last_ie.text_sha1):
-            yield revno, revision_id, "modified " + this_path
+            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
+            this_verifier = this_tree.get_file_verifier(this_path)
+            this_verifier = this_tree.get_file_verifier(this_path)
+            if (this_verifier != last_verifier):
+                yield revno, revision_id, "modified " + this_path
+        last_verifier = this_verifier
         last_path = this_path
-def _enumerate_history(branch):
-    for rev_id in branch.revision_history():
-        rh.append((revno, rev_id))
+        last_tree = this_tree
+        if last_path is None:
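The rewritten helper no longer takes a branch and a file_id: callers now pass a repository, the tip revision, the tip tree and a path, and modifications are detected by comparing file verifiers rather than inventory entries. A minimal sketch of driving the new signature, assuming `branch` is an open, read-locked branch and 'hello.c' is versioned in its tip:

    last_revision = branch.last_revision()
    last_tree = branch.repository.revision_tree(last_revision)
    for revno, revision_id, what in find_touching_revisions(
            branch.repository, last_revision, last_tree, 'hello.c'):
        print(revno, what)
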
 def show_log(branch,
+    if isinstance(start_revision, int):
+            start_revision = revisionspec.RevisionInfo(branch, start_revision)
+        except errors.NoSuchRevision:
+            raise errors.InvalidRevisionNumber(start_revision)
+    if isinstance(end_revision, int):
+            end_revision = revisionspec.RevisionInfo(branch, end_revision)
+        except errors.NoSuchRevision:
+            raise errors.InvalidRevisionNumber(end_revision)
+    if end_revision is not None and end_revision.revno == 0:
+        raise errors.InvalidRevisionNumber(end_revision.revno)
     # Build the request and execute it
     rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
         start_revision=start_revision, end_revision=end_revision,
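Because of the int coercion added above, callers can now pass plain revision numbers instead of pre-built RevisionInfo objects. A sketch, assuming `branch` is read-locked and `lf` is an existing LogFormatter (for example from log_formatter('short', to_file=sys.stdout)):

    show_log(branch, lf, verbose=False, direction='reverse',
             start_revision=1, end_revision=5)
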
 def make_log_request_dict(direction='reverse', specific_fileids=None,
                           start_revision=None, end_revision=None, limit=None,
-                          message_search=None, levels=1, generate_tags=True,
+                          message_search=None, levels=None, generate_tags=True,
                           diff_type=None, _match_using_deltas=True,
-                          exclude_common_ancestry=False,
+                          exclude_common_ancestry=False, match=None,
+                          signature=False, omit_merges=False,
     """Convenience function for making a logging request dictionary.
     :param _match_using_deltas: a private parameter controlling the
       algorithm used for matching specific_fileids. This parameter
-      may be removed in the future so bzrlib client code should NOT
+      may be removed in the future so breezy client code should NOT
     :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
       range operator or as a graph difference.
+    :param signature: show digital signature information
+    :param match: Dictionary of list of search strings to use when filtering
+      revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
+      the empty string to match any of the preceding properties.
+    :param omit_merges: If True, commits with more than one parent are
+    # Take care of old style message_search parameter
+            if 'message' in match:
+                match['message'].append(message_search)
+                match['message'] = [message_search]
+            match= {'message': [message_search]}
         'direction': direction,
         'specific_fileids': specific_fileids,
         'start_revision': start_revision,
         'end_revision': end_revision,
-        'message_search': message_search,
         'levels': levels,
         'generate_tags': generate_tags,
         'delta_type': delta_type,
         'diff_type': diff_type,
         'exclude_common_ancestry': exclude_common_ancestry,
+        'signature': signature,
+        'omit_merges': omit_merges,
         # Add 'private' attributes for features that may be deprecated
         '_match_using_deltas': _match_using_deltas,
 def _apply_log_request_defaults(rqst):
     """Apply default values to a request dictionary."""
-    result = _DEFAULT_REQUEST_PARAMS
+    result = _DEFAULT_REQUEST_PARAMS.copy()
     result.update(rqst)
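With the new keywords, a single request can filter on several revision properties at once; `message_search` is kept only for backward compatibility and is folded into `match` as shown above. A sketch of a new-style request, using the key names documented in the docstring:

    rqst = make_log_request_dict(
        direction='reverse',
        levels=0,                      # 0 means show every merge level
        match={'author': ['jrandom'],  # every listed key must match,
               'message': ['fix']},    # any one string per key is enough
        signature=True,
        omit_merges=False)
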
+def format_signature_validity(rev_id, branch):
+    """get the signature validity
+    :param rev_id: revision id to validate
+    :param branch: branch of revision
+    :return: human readable string to print to log
+    from breezy import gpg
+    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
+    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
+    if result[0] == gpg.SIGNATURE_VALID:
+        return u"valid signature from {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_KEY_MISSING:
+        return "unknown key {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_NOT_VALID:
+        return "invalid signature!"
+    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
+        return "no signature"
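The new helper is consumed further down, where the log generator attaches its result to each LogRevision when signatures were requested; on its own it simply maps a verification result to one of the strings above. A sketch, assuming `branch` is read-locked and `rev_id` names one of its revisions:

    print(format_signature_validity(rev_id, branch))
    # e.g. "valid signature from ...", "unknown key ...",
    # "invalid signature!" or "no signature"
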
 class LogGenerator(object):
     """A generator of log revisions."""
         if not isinstance(lf, LogFormatter):
             warn("not a LogFormatter instance: %r" % lf)
-        self.branch.lock_read()
+        with self.branch.lock_read():
             if getattr(lf, 'begin_log', None):
             self._show_body(lf)
             if getattr(lf, 'end_log', None):
     def _show_body(self, lf):
         """Show the main log output.
         # Tweak the LogRequest based on what the LogFormatter can handle.
         # (There's no point generating stuff if the formatter can't display it.)
-        rqst['levels'] = lf.get_levels()
+        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
+            # user didn't specify levels, use whatever the LF can handle:
+            rqst['levels'] = lf.get_levels()
         if not getattr(lf, 'supports_tags', False):
             rqst['generate_tags'] = False
         if not getattr(lf, 'supports_delta', False):
             rqst['delta_type'] = None
         if not getattr(lf, 'supports_diff', False):
             rqst['diff_type'] = None
+        if not getattr(lf, 'supports_signatures', False):
+            rqst['signature'] = False
         # Find and print the interesting revisions
         generator = self._generator_factory(self.branch, rqst)
-        for lr in generator.iter_log_revisions():
+            for lr in generator.iter_log_revisions():
+        except errors.GhostRevisionUnusableHere:
+            raise errors.BzrCommandError(
+                gettext('Further revision history missing.'))
     def _generator_factory(self, branch, rqst):
         """Make the LogGenerator object to use.
         Subclasses may wish to override this.
         return _DefaultLogGenerator(branch, rqst)
         levels = rqst.get('levels')
         limit = rqst.get('limit')
         diff_type = rqst.get('diff_type')
+        show_signature = rqst.get('signature')
+        omit_merges = rqst.get('omit_merges')
         revision_iterator = self._create_log_revision_iterator()
         for revs in revision_iterator:
             for (rev_id, revno, merge_depth), rev, delta in revs:
                 # 0 levels means show everything; merge_depth counts from 0
-                if levels != 0 and merge_depth >= levels:
+                if levels != 0 and merge_depth is not None and merge_depth >= levels:
+                if omit_merges and len(rev.parent_ids) > 1:
+                    raise errors.GhostRevisionUnusableHere(rev_id)
                 if diff_type is None:
                     diff = self._format_diff(rev, rev_id, diff_type)
+                    signature = format_signature_validity(rev_id, self.branch)
                 yield LogRevision(rev, revno, merge_depth, delta,
-                                  self.rev_tag_dict.get(rev_id), diff)
+                                  self.rev_tag_dict.get(rev_id), diff, signature)
                 if log_count >= limit:
                 rqst.get('specific_fileids')[0], view_revisions,
                 include_merges=rqst.get('levels') != 1)
         return make_log_rev_iterator(self.branch, view_revisions,
-            rqst.get('delta_type'), rqst.get('message_search'))
+            rqst.get('delta_type'), rqst.get('match'))
 def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
         a list of the same tuples.
     if (exclude_common_ancestry and start_rev_id == end_rev_id):
-        raise errors.BzrCommandError(
-            '--exclude-common-ancestry requires two different revisions')
+        raise errors.BzrCommandError(gettext(
+            '--exclude-common-ancestry requires two different revisions'))
     if direction not in ('reverse', 'forward'):
-        raise ValueError('invalid direction %r' % direction)
-    br_revno, br_rev_id = branch.last_revision_info()
+        raise ValueError(gettext('invalid direction %r') % direction)
+    br_rev_id = branch.last_revision()
+    if br_rev_id == _mod_revision.NULL_REVISION:
     if (end_rev_id and start_rev_id == end_rev_id
         and (not generate_merge_revisions
              or not _has_merges(branch, end_rev_id))):
         # If a single revision is requested, check we can handle it
-        iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
-    elif not generate_merge_revisions:
-        # If we only want to see linear revisions, we can iterate ...
-        iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
-        if direction == 'forward':
-            iter_revs = reversed(iter_revs)
-        iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
-                                            direction, delayed_graph_generation,
-                                            exclude_common_ancestry)
-        if direction == 'forward':
-            iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
+        return _generate_one_revision(branch, end_rev_id, br_rev_id,
+    if not generate_merge_revisions:
+            # If we only want to see linear revisions, we can iterate ...
+            iter_revs = _linear_view_revisions(
+                branch, start_rev_id, end_rev_id,
+                exclude_common_ancestry=exclude_common_ancestry)
+            # If a start limit was given and it's not obviously an
+            # ancestor of the end limit, check it before outputting anything
+            if (direction == 'forward'
+                or (start_rev_id and not _is_obvious_ancestor(
+                        branch, start_rev_id, end_rev_id))):
+                    iter_revs = list(iter_revs)
+            if direction == 'forward':
+                iter_revs = reversed(iter_revs)
+        except _StartNotLinearAncestor:
+            # Switch to the slower implementation that may be able to find a
+            # non-obvious ancestor out of the left-hand history.
+    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
+                                        direction, delayed_graph_generation,
+                                        exclude_common_ancestry)
+    if direction == 'forward':
+        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
         return [(br_rev_id, br_revno, 0)]
-        revno = branch.revision_id_to_dotted_revno(rev_id)
-        revno_str = '.'.join(str(n) for n in revno)
+        revno_str = _compute_revno_str(branch, rev_id)
         return [(rev_id, revno_str, 0)]
-def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
-    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
-    # If a start limit was given and it's not obviously an
-    # ancestor of the end limit, check it before outputting anything
-    if direction == 'forward' or (start_rev_id
-        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
-            result = list(result)
-        except _StartNotLinearAncestor:
-            raise errors.BzrCommandError('Start revision not found in'
-                ' left-hand history of end revision.')
 def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                             delayed_graph_generation,
                             exclude_common_ancestry=False):
     if delayed_graph_generation:
             for rev_id, revno, depth in _linear_view_revisions(
-                    branch, start_rev_id, end_rev_id):
+                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                 if _has_merges(branch, rev_id):
                     # The end_rev_id can be nested down somewhere. We need an
                     # explicit ancestry check. There is an ambiguity here as we
         except _StartNotLinearAncestor:
             # A merge was never detected so the lower revision limit can't
             # be nested down somewhere
-            raise errors.BzrCommandError('Start revision not found in'
-                ' history of end revision.')
+            raise errors.BzrCommandError(gettext('Start revision not found in'
+                ' history of end revision.'))
     # We exit the loop above because we encounter a revision with merges, from
     # this revision, we need to switch to _graph_view_revisions.
     # shown naturally, i.e. just like it is for linear logging. We can easily
     # make forward the exact opposite display, but showing the merge revisions
     # indented at the end seems slightly nicer in that case.
-    view_revisions = chain(iter(initial_revisions),
+    view_revisions = itertools.chain(iter(initial_revisions),
         _graph_view_revisions(branch, start_rev_id, end_rev_id,
                               rebase_initial_depths=(direction == 'reverse'),
                               exclude_common_ancestry=exclude_common_ancestry))
     return len(parents) > 1
+def _compute_revno_str(branch, rev_id):
+    """Compute the revno string from a rev_id.
+    :return: The revno string, or None if the revision is not in the supplied
+        revno = branch.revision_id_to_dotted_revno(rev_id)
+    except errors.NoSuchRevision:
+        # The revision must be outside of this branch
+        return '.'.join(str(n) for n in revno)
 def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
     """Is start_rev_id an obvious ancestor of end_rev_id?"""
     if start_rev_id and end_rev_id:
-        start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
-        end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
+            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
+            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
+        except errors.NoSuchRevision:
+            # one or both is not in the branch; not obvious
         if len(start_dotted) == 1 and len(end_dotted) == 1:
             # both on mainline
             return start_dotted[0] <= end_dotted[0]
-def _linear_view_revisions(branch, start_rev_id, end_rev_id):
+def _linear_view_revisions(branch, start_rev_id, end_rev_id,
+                           exclude_common_ancestry=False):
     """Calculate a sequence of revisions to view, newest to oldest.
     :param start_rev_id: the lower revision-id
     :param end_rev_id: the upper revision-id
+    :param exclude_common_ancestry: Whether the start_rev_id should be part of
+        the iterated revisions.
     :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
+        dotted_revno will be None for ghosts
     :raises _StartNotLinearAncestor: if a start_rev_id is specified but
-      is not found walking the left-hand history
+        is not found walking the left-hand history
-    br_revno, br_rev_id = branch.last_revision_info()
     repo = branch.repository
+    graph = repo.get_graph()
     if start_rev_id is None and end_rev_id is None:
-        for revision_id in repo.iter_reverse_revision_history(br_rev_id):
-            yield revision_id, str(cur_revno), 0
+            br_revno, br_rev_id = branch.last_revision_info()
+        except errors.GhostRevisionsHaveNoRevno:
+            br_rev_id = branch.last_revision()
+        graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
+            (_mod_revision.NULL_REVISION,))
+                revision_id = next(graph_iter)
+            except errors.RevisionNotPresent as e:
+                yield e.revision_id, None, None
+            except StopIteration:
+                yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
+                if cur_revno is not None:
+        br_rev_id = branch.last_revision()
         if end_rev_id is None:
             end_rev_id = br_rev_id
         found_start = start_rev_id is None
-        for revision_id in repo.iter_reverse_revision_history(end_rev_id):
-            revno = branch.revision_id_to_dotted_revno(revision_id)
-            revno_str = '.'.join(str(n) for n in revno)
-            if not found_start and revision_id == start_rev_id:
-                yield revision_id, revno_str, 0
+        graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
+            (_mod_revision.NULL_REVISION,))
+                revision_id = next(graph_iter)
+            except StopIteration:
+            except errors.RevisionNotPresent as e:
+                yield e.revision_id, None, None
-                yield revision_id, revno_str, 0
-            raise _StartNotLinearAncestor()
+                revno_str = _compute_revno_str(branch, revision_id)
+                if not found_start and revision_id == start_rev_id:
+                    if not exclude_common_ancestry:
+                        yield revision_id, revno_str, 0
+                    yield revision_id, revno_str, 0
+            raise _StartNotLinearAncestor()
 def _graph_view_revisions(branch, start_rev_id, end_rev_id,
         yield rev_id, '.'.join(map(str, revno)), merge_depth
-@deprecated_function(deprecated_in((2, 2, 0)))
-def calculate_view_revisions(branch, start_revision, end_revision, direction,
-                             specific_fileid, generate_merge_revisions):
-    """Calculate the revisions to view.
-    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
-        a list of the same tuples.
-    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
-    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
-        direction, generate_merge_revisions or specific_fileid))
-        view_revisions = _filter_revisions_touching_file_id(branch,
-            specific_fileid, view_revisions,
-            include_merges=generate_merge_revisions)
-    return _rebase_merge_depth(view_revisions)
 def _rebase_merge_depth(view_revisions):
     """Adjust depths upwards so the top level is 0."""
     # If either the first or last revision have a merge_depth of 0, we're done
     if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
-        min_depth = min([d for r,n,d in view_revisions])
+        min_depth = min([d for r, n, d in view_revisions])
         if min_depth != 0:
-            view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
+            view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
     return view_revisions
     # Convert view_revisions into (view, None, None) groups to fit with
     # the standard interface here.
-    if type(view_revisions) == list:
+    if isinstance(view_revisions, list):
         # A single batch conversion is faster than many incremental ones.
         # As we have all the data, do a batch conversion.
         nones = [None] * len(view_revisions)
-        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
+        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
             for view in view_revisions:
     return log_rev_iterator
-def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
+def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
     """Create a filtered iterator of log_rev_iterator matching on a regex.
     :param branch: The branch being logged.
     :param generate_delta: Whether to generate a delta for each revision.
-    :param search: A user text search string.
+    :param match: A dictionary with properties as keys and lists of strings
+        as values. To match, a revision may match any of the supplied strings
+        within a single property but must match at least one string for each
     :param log_rev_iterator: An input iterator containing all revisions that
         could be displayed, in lists.
     :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
         return log_rev_iterator
-    searchRE = re_compile_checked(search, re.IGNORECASE,
-            'log message filter')
-    return _filter_message_re(searchRE, log_rev_iterator)
-def _filter_message_re(searchRE, log_rev_iterator):
+    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
+                for k, v in match.items()]
+    return _filter_re(searchRE, log_rev_iterator)
+def _filter_re(searchRE, log_rev_iterator):
     for revs in log_rev_iterator:
-        for (rev_id, revno, merge_depth), rev, delta in revs:
-            if searchRE.search(rev.message):
-                new_revs.append(((rev_id, revno, merge_depth), rev, delta))
+        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
+def _match_filter(searchRE, rev):
+        'message': (rev.message,),
+        'committer': (rev.committer,),
+        'author': (rev.get_apparent_authors()),
+        'bugs': list(rev.iter_bugs())
+    strings[''] = [item for inner_list in strings.values()
+                   for item in inner_list]
+    for (k, v) in searchRE:
+        if k in strings and not _match_any_filter(strings[k], v):
+def _match_any_filter(strings, res):
+    return any(re.search(s) for re in res for s in strings)
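Together these helpers give `match` its semantics: every property named in the request must match (AND across keys), while any one of the strings listed for a property is enough (OR within a key), and the empty-string key is checked against all properties. A sketch of the structure they compile and walk:

    import re
    match = {'message': ['fix', 'regression'], 'author': ['jrandom']}
    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
                for k, v in match.items()]
    # _match_filter() then needs a hit for both 'message' and 'author',
    # but either 'fix' or 'regression' satisfies the 'message' key.
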
 def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                        fileids=None, direction='reverse'):
         if delta_type == 'full' and not check_fileids:
             deltas = repository.get_deltas_for_revisions(revisions)
-            for rev, delta in izip(revs, deltas):
+            for rev, delta in zip(revs, deltas):
                 new_revs.append((rev[0], rev[1], delta))
             deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
-            for rev, delta in izip(revs, deltas):
+            for rev, delta in zip(revs, deltas):
                 if check_fileids:
                     if delta is None or not delta.has_changed():
     for revs in log_rev_iterator:
         # r = revision_id, n = revno, d = merge depth
         revision_ids = [view[0] for view, _, _ in revs]
-        revisions = repository.get_revisions(revision_ids)
-        revs = [(rev[0], revision, rev[2]) for rev, revision in
-            izip(revs, revisions)]
+        revisions = dict(repository.iter_revisions(revision_ids))
+        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
 def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
     :return: (start_rev_id, end_rev_id) tuple.
-    branch_revno, branch_rev_id = branch.last_revision_info()
     start_rev_id = None
-    if start_revision is None:
+    if start_revision is not None:
+        if not isinstance(start_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        start_rev_id = start_revision.rev_id
+        start_revno = start_revision.revno
+    if start_revno is None:
-        if isinstance(start_revision, revisionspec.RevisionInfo):
-            start_rev_id = start_revision.rev_id
-            start_revno = start_revision.revno or 1
-            branch.check_real_revno(start_revision)
-            start_revno = start_revision
-            start_rev_id = branch.get_rev_id(start_revno)
     end_rev_id = None
-    if end_revision is None:
-        end_revno = branch_revno
-        if isinstance(end_revision, revisionspec.RevisionInfo):
-            end_rev_id = end_revision.rev_id
-            end_revno = end_revision.revno or branch_revno
-            branch.check_real_revno(end_revision)
-            end_revno = end_revision
-            end_rev_id = branch.get_rev_id(end_revno)
+    if end_revision is not None:
+        if not isinstance(end_revision, revisionspec.RevisionInfo):
+            raise TypeError(start_revision)
+        end_rev_id = end_revision.rev_id
+        end_revno = end_revision.revno
+    if end_revno is None:
+            end_revno = branch.revno()
+        except errors.GhostRevisionsHaveNoRevno:
-    if branch_revno != 0:
+    if branch.last_revision() != _mod_revision.NULL_REVISION:
         if (start_rev_id == _mod_revision.NULL_REVISION
             or end_rev_id == _mod_revision.NULL_REVISION):
-            raise errors.BzrCommandError('Logging revision 0 is invalid.')
-        if start_revno > end_revno:
-            raise errors.BzrCommandError("Start revision must be older than "
-                                         "the end revision.")
+            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
+        if end_revno is not None and start_revno > end_revno:
+            raise errors.BzrCommandError(gettext("Start revision must be "
+                                                 "older than the end revision."))
     return (start_rev_id, end_rev_id)
     if ((start_rev_id == _mod_revision.NULL_REVISION)
         or (end_rev_id == _mod_revision.NULL_REVISION)):
-        raise errors.BzrCommandError('Logging revision 0 is invalid.')
+        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
     if start_revno > end_revno:
-        raise errors.BzrCommandError("Start revision must be older than "
-                                     "the end revision.")
+        raise errors.BzrCommandError(gettext("Start revision must be older "
+                                             "than the end revision."))
     if end_revno < start_revno:
         return None, None, None, None
     cur_revno = branch_revno
     mainline_revs = []
-    for revision_id in branch.repository.iter_reverse_revision_history(
-            branch_last_revision):
+    graph = branch.repository.get_graph()
+    for revision_id in graph.iter_lefthand_ancestry(
+            branch_last_revision, (_mod_revision.NULL_REVISION,)):
         if cur_revno < start_revno:
             # We have gone far enough, but we always add 1 more revision
             rev_nos[revision_id] = cur_revno
     return mainline_revs, rev_nos, start_rev_id, end_rev_id
-@deprecated_function(deprecated_in((2, 2, 0)))
-def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
-    """Filter view_revisions based on revision ranges.
-    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
-        tuples to be filtered.
-    :param start_rev_id: If not NONE specifies the first revision to be logged.
-        If NONE then all revisions up to the end_rev_id are logged.
-    :param end_rev_id: If not NONE specifies the last revision to be logged.
-        If NONE then all revisions up to the end of the log are logged.
-    :return: The filtered view_revisions.
-    if start_rev_id or end_rev_id:
-        revision_ids = [r for r, n, d in view_revisions]
-            start_index = revision_ids.index(start_rev_id)
-        if start_rev_id == end_rev_id:
-            end_index = start_index
-                end_index = revision_ids.index(end_rev_id)
-                end_index = len(view_revisions) - 1
-        # To include the revisions merged into the last revision,
-        # extend end_rev_id down to, but not including, the next rev
-        # with the same or lesser merge_depth
-        end_merge_depth = view_revisions[end_index][2]
-            for index in xrange(end_index+1, len(view_revisions)+1):
-                if view_revisions[index][2] <= end_merge_depth:
-                    end_index = index - 1
-            # if the search falls off the end then log to the end as well
-            end_index = len(view_revisions) - 1
-        view_revisions = view_revisions[start_index:end_index+1]
-    return view_revisions
 def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                        include_merges=True):
     r"""Return the list of revision ids which touch a given file id.
     # indexing layer. We might consider passing in hints as to the known
     # access pattern (sparse/clustered, high success rate/low success
     # rate). This particular access is clustered with a low success rate.
-    get_parent_map = branch.repository.texts.get_parent_map
     modified_text_revisions = set()
     chunk_size = 1000
-    for start in xrange(0, len(text_keys), chunk_size):
+    for start in range(0, len(text_keys), chunk_size):
         next_keys = text_keys[start:start + chunk_size]
         # Only keep the revision_id portion of the key
         modified_text_revisions.update(
-@deprecated_function(deprecated_in((2, 2, 0)))
-def get_view_revisions(mainline_revs, rev_nos, branch, direction,
-                       include_merges=True):
-    """Produce an iterator of revisions to show
-    :return: an iterator of (revision_id, revno, merge_depth)
-    (if there is no revno for a revision, None is supplied)
-    if not include_merges:
-        revision_ids = mainline_revs[1:]
-        if direction == 'reverse':
-            revision_ids.reverse()
-        for revision_id in revision_ids:
-            yield revision_id, str(rev_nos[revision_id]), 0
-    graph = branch.repository.get_graph()
-    # This asks for all mainline revisions, which means we only have to spider
-    # sideways, rather than depth history. That said, its still size-of-history
-    # and should be addressed.
-    # mainline_revisions always includes an extra revision at the beginning, so
-    parent_map = dict(((key, value) for key, value in
-        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
-    # filter out ghosts; merge_sort errors on ghosts.
-    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
-    merge_sorted_revisions = tsort.merge_sort(
-        generate_revno=True)
-    if direction == 'forward':
-        # forward means oldest first.
-        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
-    elif direction != 'reverse':
-        raise ValueError('invalid direction %r' % direction)
-    for (sequence, rev_id, merge_depth, revno, end_of_merge
-         ) in merge_sorted_revisions:
-        yield rev_id, '.'.join(map(str, revno)), merge_depth
 def reverse_by_depth(merge_sorted_revisions, _depth=0):
     """Reverse revisions by depth.
     to indicate which LogRevision attributes it supports:
     - supports_delta must be True if this log formatter supports delta.
-        Otherwise the delta attribute may not be populated. The 'delta_format'
-        attribute describes whether the 'short_status' format (1) or the long
-        one (2) should be used.
+      Otherwise the delta attribute may not be populated. The 'delta_format'
+      attribute describes whether the 'short_status' format (1) or the long
+      one (2) should be used.
     - supports_merge_revisions must be True if this log formatter supports
-        merge revisions. If not, then only mainline revisions will be passed
+      merge revisions. If not, then only mainline revisions will be passed
     - preferred_levels is the number of levels this formatter defaults to.
-        The default value is zero meaning display all levels.
-        This value is only relevant if supports_merge_revisions is True.
+      The default value is zero meaning display all levels.
+      This value is only relevant if supports_merge_revisions is True.
     - supports_tags must be True if this log formatter supports tags.
-        Otherwise the tags attribute may not be populated.
+      Otherwise the tags attribute may not be populated.
     - supports_diff must be True if this log formatter supports diffs.
-        Otherwise the diff attribute may not be populated.
+      Otherwise the diff attribute may not be populated.
+    - supports_signatures must be True if this log formatter supports GPG
     Plugins can register functions to show custom revision properties using
     the properties_handler_registry. The registered function
-    must respect the following interface description:
+    must respect the following interface description::
         def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
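These attributes let a formatter declare what it can render; anything it does not claim to support is stripped from the request before generation (see _show_body above). An illustrative, minimal subclass follows; the method body is a placeholder, not breezy's own formatter code:

    class TinyLogFormatter(LogFormatter):

        supports_merge_revisions = True
        preferred_levels = 1
        supports_delta = False
        supports_tags = True
        supports_diff = False
        supports_signatures = False

        def log_revision(self, revision):
            # revision is a LogRevision; revno may be None for ghosts
            self.to_file.write('%s %s\n' % (revision.revno or '',
                                            revision.rev.get_summary()))
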
     def __init__(self, to_file, show_ids=False, show_timezone='original',
                  delta_format=None, levels=None, show_advice=False,
-                 to_exact_file=None):
+                 to_exact_file=None, author_list_handler=None):
         """Create a LogFormatter.
         :param to_file: the file to output to
-        :param to_exact_file: if set, gives an output stream to which
+        :param to_exact_file: if set, gives an output stream to which
             non-Unicode diffs are written.
         :param show_ids: if True, revision-ids are to be displayed
         :param show_timezone: the timezone to use
     def short_author(self, rev):
-        name, address = config.parse_username(rev.get_apparent_authors()[0])
+        return self.authors(rev, 'first', short=True, sep=', ')
+    def authors(self, rev, who, short=False, sep=None):
+        """Generate list of authors, taking --authors option into account.
+        The caller has to specify the name of a author list handler,
+        as provided by the author list registry, using the ``who``
+        argument. That name only sets a default, though: when the
+        user selected a different author list generation using the
+        ``--authors`` command line switch, as represented by the
+        ``author_list_handler`` constructor argument, that value takes
+        :param rev: The revision for which to generate the list of authors.
+        :param who: Name of the default handler.
+        :param short: Whether to shorten names to either name or address.
+        :param sep: What separator to use for automatic concatenation.
+        if self._author_list_handler is not None:
+            # The user did specify --authors, which overrides the default
+            author_list_handler = self._author_list_handler
+            # The user didn't specify --authors, so we use the caller's default
+            author_list_handler = author_list_registry.get(who)
+        names = author_list_handler(rev)
+            for i in range(len(names)):
+                name, address = config.parse_username(names[i])
+            names = sep.join(names)
     def merge_marker(self, revision):
         """Get the merge marker to include in the output or '' if none."""
     def show_diff(self, to_file, diff, indent):
-        for l in diff.rstrip().split('\n'):
-            to_file.write(indent + '%s\n' % (l,))
+        encoding = get_terminal_encoding()
+        for l in diff.rstrip().split(b'\n'):
+            to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
     # Separator between revisions in long format
             lines.append('revno: %s%s' % (revision.revno,
                 self.merge_marker(revision)))
         if revision.tags:
-            lines.append('tags: %s' % (', '.join(revision.tags)))
+            lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
+        if self.show_ids or revision.revno is None:
+            lines.append('revision-id: %s' % (revision.rev.revision_id.decode('utf-8'),))
         if self.show_ids:
-            lines.append('revision-id: %s' % (revision.rev.revision_id,))
             for parent_id in revision.rev.parent_ids:
-                lines.append('parent: %s' % (parent_id,))
+                lines.append('parent: %s' % (parent_id.decode('utf-8'),))
         lines.extend(self.custom_properties(revision.rev))
         committer = revision.rev.committer
-        authors = revision.rev.get_apparent_authors()
+        authors = self.authors(revision.rev, 'all')
         if authors != [committer]:
             lines.append('author: %s' % (", ".join(authors),))
         lines.append('committer: %s' % (committer,))
         to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
         if revision.delta is not None:
             # Use the standard status output to display changes
-            from bzrlib.delta import report_delta
-            report_delta(to_file, revision.delta, short_status=False,
+            from breezy.delta import report_delta
+            report_delta(to_file, revision.delta, short_status=False,
                          show_ids=self.show_ids, indent=indent)
         if revision.diff is not None:
             to_file.write(indent + 'diff:\n')
         to_file = self.to_file
         if revision.tags:
-            tags = ' {%s}' % (', '.join(revision.tags))
+            tags = ' {%s}' % (', '.join(sorted(revision.tags)))
         to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
-                revision.revno, self.short_author(revision.rev),
+                revision.revno or "", self.short_author(revision.rev),
                 format_date(revision.rev.timestamp,
                             revision.rev.timezone or 0,
                             self.show_timezone, date_fmt="%Y-%m-%d",
                             show_offset=False),
                 tags, self.merge_marker(revision)))
         self.show_properties(revision.rev, indent+offset)
+        if self.show_ids or revision.revno is None:
             to_file.write(indent + offset + 'revision-id:%s\n'
-                          % (revision.rev.revision_id,))
+                          % (revision.rev.revision_id.decode('utf-8'),))
         if not revision.rev.message:
             to_file.write(indent + offset + '(no message)\n')
         if revision.delta is not None:
             # Use the standard status output to display changes
-            from bzrlib.delta import report_delta
-            report_delta(to_file, revision.delta,
-                         short_status=self.delta_format==1,
+            from breezy.delta import report_delta
+            report_delta(to_file, revision.delta,
+                         short_status=self.delta_format==1,
                          show_ids=self.show_ids, indent=indent + offset)
         if revision.diff is not None:
             self.show_diff(self.to_exact_file, revision.diff, '      ')
     def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
         """Format log info into one string. Truncate tail of string
-        :param revno: revision number or None.
-            Revision numbers counts from 1.
-        :param rev: revision object
-        :param max_chars: maximum length of resulting string
-        :param tags: list of tags or None
-        :param prefix: string to prefix each line
-        :return: formatted truncated string
+        :param revno: revision number or None.
+            Revision numbers counts from 1.
+        :param rev: revision object
+        :param max_chars: maximum length of resulting string
+        :param tags: list of tags or None
+        :param prefix: string to prefix each line
+        :return: formatted truncated string
         # show revno only when is not None
             out.append("%s:" % revno)
-        out.append(self.truncate(self.short_author(rev), 20))
+        if max_chars is not None:
+            out.append(self.truncate(self.short_author(rev), (max_chars+3)//4))
+            out.append(self.short_author(rev))
         out.append(self.date_string(rev))
         if len(rev.parent_ids) > 1:
             out.append('[merge]')
-            tag_str = '{%s}' % (', '.join(tags))
+            tag_str = '{%s}' % (', '.join(sorted(tags)))
            out.append(tag_str)
         out.append(rev.get_summary())
         return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
                                self.show_timezone,
                                date_fmt='%Y-%m-%d',
                               show_offset=False)
-        committer_str = revision.rev.get_apparent_authors()[0].replace (' <', '  <')
-        to_file.write('%s  %s\n\n' % (date_str,committer_str))
+        committer_str = self.authors(revision.rev, 'first', sep=', ')
+        committer_str = committer_str.replace(' <', '  <')
+        to_file.write('%s  %s\n\n' % (date_str, committer_str))
         if revision.delta is not None and revision.delta.has_changed():
             for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                 to_file.write('\t* %s:\n' % (path,))
             for c in revision.delta.renamed:
-                oldpath,newpath = c[:2]
+                oldpath, newpath = c[:2]
                 # For renamed files, show both the old and the new path
-                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))
+                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
             to_file.write('\n')
         if not revision.rev.message:
         return self.get(name)(*args, **kwargs)
     def get_default(self, branch):
-        return self.get(branch.get_config().log_format())
+        c = branch.get_config_stack()
+        return self.get(c.get('log_format'))
 log_formatter_registry = LogFormatterRegistry()
 log_formatter_registry.register('short', ShortLogFormatter,
-                                'Moderately short log format')
+                                'Moderately short log format.')
 log_formatter_registry.register('long', LongLogFormatter,
-                                'Detailed log format')
+                                'Detailed log format.')
 log_formatter_registry.register('line', LineLogFormatter,
-                                'Log format with one line per revision')
+                                'Log format with one line per revision.')
 log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
-                                'Format used by GNU ChangeLog files')
+                                'Format used by GNU ChangeLog files.')
 def register_formatter(name, formatter):
         return log_formatter_registry.make_formatter(name, *args, **kwargs)
     except KeyError:
-        raise errors.BzrCommandError("unknown log formatter: %r" % name)
-def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
-    # deprecated; for compatibility
-    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
-    lf.show(revno, rev, delta)
+        raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)
+def author_list_all(rev):
+    return rev.get_apparent_authors()[:]
+def author_list_first(rev):
+    lst = rev.get_apparent_authors()
+def author_list_committer(rev):
+    return [rev.committer]
+author_list_registry = registry.Registry()
+author_list_registry.register('all', author_list_all,
+author_list_registry.register('first', author_list_first,
+author_list_registry.register('committer', author_list_committer,
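The handlers registered here back the --authors option: each one takes a revision and returns a list of strings, and LogFormatter.authors() looks the default handler up by name. A hypothetical extra handler, registered the same way as the built-ins above:

    def author_list_email_only(rev):
        return [config.parse_username(a)[1]
                for a in rev.get_apparent_authors()]

    author_list_registry.register('email', author_list_email_only,
                                  'Email addresses only')
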
 def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
         output.write('Added Revisions:\n')
         start_revno = new_revno - len(new_history) + 1
         show_log(branch, lf, None, verbose=False, direction='forward',
-                 start_revision=start_revno,)
+                 start_revision=start_revno)
 def show_flat_log(repository, history, last_revno, lf):
     :param file_list: the list of paths given on the command line;
         the first of these can be a branch location or a file path,
         the remainder must be file paths
+    :param add_cleanup: When the branch returned is read locked,
+        an unlock call will be queued to the cleanup.
     :return: (branch, info_list, start_rev_info, end_rev_info) where
         info_list is a list of (relative_path, file_id, kind) tuples where
         kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
         branch will be read-locked.
-    from builtins import _get_revision_range, safe_relpath_files
-    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
+    from breezy.builtins import _get_revision_range
+    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
+    add_cleanup(b.lock_read().unlock)
     # XXX: It's damn messy converting a list of paths to relative paths when
     # those paths might be deleted ones, they might be on a case-insensitive
     # filesystem and/or they might be in silly locations (like another branch).
             tree1 = b.repository.revision_tree(rev_id)
             file_id = tree1.path2id(fp)
-            kind = _get_kind_for_file_id(tree1, file_id)
+            kind = _get_kind_for_file_id(tree1, fp, file_id)
             info_list.append((fp, file_id, kind))
     return b, info_list, start_rev_info, end_rev_info
-def _get_kind_for_file_id(tree, file_id):
+def _get_kind_for_file_id(tree, path, file_id):
     """Return the kind of a file-id or None if it doesn't exist."""
     if file_id is not None:
-        return tree.kind(file_id)
+        return tree.kind(path, file_id)
 # Use the properties handlers to print out bug information if available
 def _bugs_properties_handler(revision):
-    if revision.properties.has_key('bugs'):
+    if 'bugs' in revision.properties:
         bug_lines = revision.properties['bugs'].split('\n')
         bug_rows = [line.split(' ', 1) for line in bug_lines]
         fixed_bug_urls = [row[0] for row in bug_rows if
                           len(row) > 1 and row[1] == 'fixed']
+        related_bug_urls = [row[0] for row in bug_rows if
+                            len(row) > 1 and row[1] == 'related']
         if fixed_bug_urls:
-            return {'fixes bug(s)': ' '.join(fixed_bug_urls)}
+            ret[ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls))] = (
+                ' '.join(fixed_bug_urls))
+        if related_bug_urls:
+            ret[ngettext('related bug', 'related bugs', len(related_bug_urls))] = (
+                ' '.join(related_bug_urls))
 properties_handler_registry.register('bugs_properties_handler',
                                      _bugs_properties_handler)
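Plugins can add further entries in the same way; a handler just returns a dict of label/value pairs to print for a revision. A hypothetical example alongside the bugs handler above:

    def _branch_nick_properties_handler(revision):
        if 'branch-nick' in revision.properties:
            return {'branch nick': revision.properties['branch-nick']}
        return {}

    properties_handler_registry.register('branch_nick_properties_handler',
                                         _branch_nick_properties_handler)
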