all the changes since the previous revision that touched hello.c.

from __future__ import absolute_import

from cStringIO import StringIO
from itertools import (
from warnings import (

-from bzrlib.lazy_import import lazy_import
+from .lazy_import import lazy_import
lazy_import(globals(), """
    repository as _mod_repository,
    revision as _mod_revision,
+from breezy.i18n import gettext, ngettext

-from bzrlib.osutils import (
+from .osutils import (
    format_date_with_offset_in_original_timezone,
+    get_diff_header_encoding,
    get_terminal_encoding,
-from bzrlib.symbol_versioning import (
+from .tree import find_previous_path
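
# A minimal sketch of the lazy_import pattern used above (module names are
# illustrative, not part of the diff): imports named inside the triple-quoted
# block are only resolved on first attribute access, which keeps start-up cheap.
from breezy.lazy_import import lazy_import
lazy_import(globals(), """
from breezy import (
    repository as _mod_repository,
    revision as _mod_revision,
    )
""")
# _mod_revision stays a lazy proxy until first use:
null_id = _mod_revision.NULL_REVISION
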
-def find_touching_revisions(branch, file_id):
+def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
+    last_verifier = last_tree.get_file_verifier(last_path)
+    graph = repository.get_graph()
+    history = list(graph.iter_lefthand_ancestry(last_revision, []))
-    for revision_id in branch.revision_history():
-        this_inv = branch.repository.get_inventory(revision_id)
-        if file_id in this_inv:
-            this_ie = this_inv[file_id]
-            this_path = this_inv.id2path(file_id)
-            this_ie = this_path = None
+    for revision_id in history:
+        this_tree = repository.revision_tree(revision_id)
+        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
-        if not this_ie and not last_ie:
-            # not present in either
-        elif this_ie and not last_ie:
-            yield revno, revision_id, "added " + this_path
-        elif not this_ie and last_ie:
-            yield revno, revision_id, "deleted " + last_path
+        if this_path is not None and last_path is None:
+            yield revno, revision_id, "deleted " + this_path
+            this_verifier = this_tree.get_file_verifier(this_path)
+        elif this_path is None and last_path is not None:
+            yield revno, revision_id, "added " + last_path
        elif this_path != last_path:
-            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
-        elif (this_ie.text_size != last_ie.text_size
-              or this_ie.text_sha1 != last_ie.text_sha1):
-            yield revno, revision_id, "modified " + this_path
+            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
+            this_verifier = this_tree.get_file_verifier(this_path)
+            this_verifier = this_tree.get_file_verifier(this_path)
+            if (this_verifier != last_verifier):
+                yield revno, revision_id, "modified " + this_path

+        last_verifier = this_verifier
        last_path = this_path
+        last_tree = this_tree
+        if last_path is None:
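
# Hedged usage sketch for the new signature above: the caller now passes a
# starting tree/path pair instead of a file_id, and the helper walks the
# left-hand ancestry comparing file verifiers rather than inventory entries.
from breezy.branch import Branch

branch = Branch.open('.')          # assumed: run from inside a branch
with branch.lock_read():
    tree = branch.basis_tree()
    for revno, rev_id, what in find_touching_revisions(
            branch.repository, branch.last_revision(), tree, 'hello.c'):
        print(revno, what)
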
-def _enumerate_history(branch):
-    for rev_id in branch.revision_history():
-        rh.append((revno, rev_id))
def show_log(branch,

+    if isinstance(start_revision, int):
+            start_revision = revisionspec.RevisionInfo(branch, start_revision)
+        except errors.NoSuchRevision:
+            raise errors.InvalidRevisionNumber(start_revision)
+    if isinstance(end_revision, int):
+            end_revision = revisionspec.RevisionInfo(branch, end_revision)
+        except errors.NoSuchRevision:
+            raise errors.InvalidRevisionNumber(end_revision)
+    if end_revision is not None and end_revision.revno == 0:
+        raise errors.InvalidRevisionNumber(end_revision.revno)

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
-                          message_search=None, levels=1, generate_tags=True,
+                          message_search=None, levels=None, generate_tags=True,
                          diff_type=None, _match_using_deltas=True,
-                          exclude_common_ancestry=False,
+                          exclude_common_ancestry=False, match=None,
+                          signature=False, omit_merges=False,
    """Convenience function for making a logging request dictionary.

    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
-        may be removed in the future so bzrlib client code should NOT
+        may be removed in the future so breezy client code should NOT

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.

+    :param signature: show digital signature information
+    :param match: Dictionary of list of search strings to use when filtering
+        revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
+        the empty string to match any of the preceding properties.
+    :param omit_merges: If True, commits with more than one parent are

+    # Take care of old style message_search parameter
+            if 'message' in match:
+                match['message'].append(message_search)
+                match['message'] = [message_search]
+            match={ 'message': [message_search] }

        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
-        'message_search': message_search,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
+        'signature': signature,
+        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
-    result = _DEFAULT_REQUEST_PARAMS
+    result = _DEFAULT_REQUEST_PARAMS.copy()
    result.update(rqst)
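
# Hedged usage sketch: build a request dict, let the defaults fill any gaps,
# and feed it to the Logger driver together with a formatter.  'branch', 'lf'
# and the Logger class are assumed from elsewhere in this module.
rqst = make_log_request_dict(
    direction='reverse',
    levels=0,                      # 0 means show every merge level
    match={'message': ['fix'], '': ['jane@example.com']},
    limit=10)
rqst = _apply_log_request_defaults(rqst)
Logger(branch, rqst).show(lf)
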
+def format_signature_validity(rev_id, branch):
+    """get the signature validity
+
+    :param rev_id: revision id to validate
+    :param branch: branch of revision
+    :return: human readable string to print to log
+    from breezy import gpg
+    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
+    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
+    if result[0] == gpg.SIGNATURE_VALID:
+        return u"valid signature from {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_KEY_MISSING:
+        return "unknown key {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_NOT_VALID:
+        return "invalid signature!"
+    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
+        return "no signature"
class LogGenerator(object):
    """A generator of log revisions."""

        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)
-        self.branch.lock_read()
+        with self.branch.lock_read():
            if getattr(lf, 'begin_log', None):
            self._show_body(lf)
            if getattr(lf, 'end_log', None):

    def _show_body(self, lf):
        """Show the main log output.

        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
-        rqst['levels'] = lf.get_levels()
+        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
+            # user didn't specify levels, use whatever the LF can handle:
+            rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
+        if not getattr(lf, 'supports_signatures', False):
+            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
-        for lr in generator.iter_log_revisions():
+            for lr in generator.iter_log_revisions():
+        except errors.GhostRevisionUnusableHere:
+            raise errors.BzrCommandError(
+                gettext('Further revision history missing.'))

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        return _DefaultLogGenerator(branch, rqst)
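
# Hedged sketch: because _show_body asks self._generator_factory() for its
# generator, a subclass of the enclosing Logger class (assumed here) can swap
# in a different revision source without touching any formatter code.
# MyLogGenerator is hypothetical; it only needs an iter_log_revisions() method
# that yields LogRevision objects.
class MyLogger(Logger):

    def _generator_factory(self, branch, rqst):
        return MyLogGenerator(branch, rqst)
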
466
407
# 0 levels means show everything; merge_depth counts from 0
467
408
if levels != 0 and merge_depth >= levels:
469
if omit_merges and len(rev.parent_ids) > 1:
472
raise errors.GhostRevisionUnusableHere(rev_id)
473
410
if diff_type is None:
476
413
diff = self._format_diff(rev, rev_id, diff_type)
478
signature = format_signature_validity(rev_id, self.branch)
481
414
yield LogRevision(rev, revno, merge_depth, delta,
482
self.rev_tag_dict.get(rev_id), diff, signature)
415
self.rev_tag_dict.get(rev_id), diff)
485
418
if log_count >= limit:
559
491
rqst.get('specific_fileids')[0], view_revisions,
560
492
include_merges=rqst.get('levels') != 1)
561
493
return make_log_rev_iterator(self.branch, view_revisions,
562
rqst.get('delta_type'), rqst.get('match'))
494
rqst.get('delta_type'), rqst.get('message_search'))
565
497
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
573
505
a list of the same tuples.
575
507
if (exclude_common_ancestry and start_rev_id == end_rev_id):
576
raise errors.BzrCommandError(gettext(
577
'--exclude-common-ancestry requires two different revisions'))
508
raise errors.BzrCommandError(
509
'--exclude-common-ancestry requires two different revisions')
578
510
if direction not in ('reverse', 'forward'):
579
raise ValueError(gettext('invalid direction %r') % direction)
580
br_rev_id = branch.last_revision()
581
if br_rev_id == _mod_revision.NULL_REVISION:
511
raise ValueError('invalid direction %r' % direction)
512
br_revno, br_rev_id = branch.last_revision_info()
584
516
if (end_rev_id and start_rev_id == end_rev_id
585
517
and (not generate_merge_revisions
586
518
or not _has_merges(branch, end_rev_id))):
587
519
# If a single revision is requested, check we can handle it
588
return _generate_one_revision(branch, end_rev_id, br_rev_id,
590
if not generate_merge_revisions:
592
# If we only want to see linear revisions, we can iterate ...
593
iter_revs = _linear_view_revisions(
594
branch, start_rev_id, end_rev_id,
595
exclude_common_ancestry=exclude_common_ancestry)
596
# If a start limit was given and it's not obviously an
597
# ancestor of the end limit, check it before outputting anything
598
if (direction == 'forward'
599
or (start_rev_id and not _is_obvious_ancestor(
600
branch, start_rev_id, end_rev_id))):
601
iter_revs = list(iter_revs)
602
if direction == 'forward':
603
iter_revs = reversed(iter_revs)
605
except _StartNotLinearAncestor:
606
# Switch to the slower implementation that may be able to find a
607
# non-obvious ancestor out of the left-hand history.
609
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
610
direction, delayed_graph_generation,
611
exclude_common_ancestry)
612
if direction == 'forward':
613
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
520
iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
522
elif not generate_merge_revisions:
523
# If we only want to see linear revisions, we can iterate ...
524
iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
526
if direction == 'forward':
527
iter_revs = reversed(iter_revs)
529
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
530
direction, delayed_graph_generation,
531
exclude_common_ancestry)
532
if direction == 'forward':
533
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
620
540
return [(br_rev_id, br_revno, 0)]
622
revno_str = _compute_revno_str(branch, rev_id)
542
revno = branch.revision_id_to_dotted_revno(rev_id)
543
revno_str = '.'.join(str(n) for n in revno)
623
544
return [(rev_id, revno_str, 0)]
547
def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
548
result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
549
# If a start limit was given and it's not obviously an
550
# ancestor of the end limit, check it before outputting anything
551
if direction == 'forward' or (start_rev_id
552
and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
554
result = list(result)
555
except _StartNotLinearAncestor:
556
raise errors.BzrCommandError('Start revision not found in'
557
' left-hand history of end revision.')
626
561
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
627
562
delayed_graph_generation,
628
563
exclude_common_ancestry=False):
637
572
if delayed_graph_generation:
639
574
for rev_id, revno, depth in _linear_view_revisions(
640
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
575
branch, start_rev_id, end_rev_id):
641
576
if _has_merges(branch, rev_id):
642
577
# The end_rev_id can be nested down somewhere. We need an
643
578
# explicit ancestry check. There is an ambiguity here as we
664
599
except _StartNotLinearAncestor:
665
600
# A merge was never detected so the lower revision limit can't
666
601
# be nested down somewhere
667
raise errors.BzrCommandError(gettext('Start revision not found in'
668
' history of end revision.'))
602
raise errors.BzrCommandError('Start revision not found in'
603
' history of end revision.')
670
605
# We exit the loop above because we encounter a revision with merges, from
671
606
# this revision, we need to switch to _graph_view_revisions.
675
610
# shown naturally, i.e. just like it is for linear logging. We can easily
676
611
# make forward the exact opposite display, but showing the merge revisions
677
612
# indented at the end seems slightly nicer in that case.
678
view_revisions = itertools.chain(iter(initial_revisions),
613
view_revisions = chain(iter(initial_revisions),
679
614
_graph_view_revisions(branch, start_rev_id, end_rev_id,
680
615
rebase_initial_depths=(direction == 'reverse'),
681
616
exclude_common_ancestry=exclude_common_ancestry))
688
623
return len(parents) > 1
+def _compute_revno_str(branch, rev_id):
+    """Compute the revno string from a rev_id.
+
+    :return: The revno string, or None if the revision is not in the supplied
+        revno = branch.revision_id_to_dotted_revno(rev_id)
+    except errors.NoSuchRevision:
+        # The revision must be outside of this branch
+    return '.'.join(str(n) for n in revno)
706
626
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
707
627
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
708
628
if start_rev_id and end_rev_id:
710
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
711
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
712
except errors.NoSuchRevision:
713
# one or both is not in the branch; not obvious
629
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
630
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
715
631
if len(start_dotted) == 1 and len(end_dotted) == 1:
716
632
# both on mainline
717
633
return start_dotted[0] <= end_dotted[0]
730
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
731
exclude_common_ancestry=False):
646
def _linear_view_revisions(branch, start_rev_id, end_rev_id):
732
647
"""Calculate a sequence of revisions to view, newest to oldest.
734
649
:param start_rev_id: the lower revision-id
735
650
:param end_rev_id: the upper revision-id
736
:param exclude_common_ancestry: Whether the start_rev_id should be part of
737
the iterated revisions.
738
651
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
739
dotted_revno will be None for ghosts
740
652
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
741
is not found walking the left-hand history
653
is not found walking the left-hand history
655
br_revno, br_rev_id = branch.last_revision_info()
743
656
repo = branch.repository
744
graph = repo.get_graph()
745
657
if start_rev_id is None and end_rev_id is None:
747
br_revno, br_rev_id = branch.last_revision_info()
748
except errors.GhostRevisionsHaveNoRevno:
749
br_rev_id = branch.last_revision()
753
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
754
(_mod_revision.NULL_REVISION,))
757
revision_id = next(graph_iter)
758
except errors.RevisionNotPresent as e:
760
yield e.revision_id, None, None
763
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
764
if cur_revno is not None:
659
for revision_id in repo.iter_reverse_revision_history(br_rev_id):
660
yield revision_id, str(cur_revno), 0
767
br_rev_id = branch.last_revision()
768
663
if end_rev_id is None:
769
664
end_rev_id = br_rev_id
770
665
found_start = start_rev_id is None
771
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
772
(_mod_revision.NULL_REVISION,))
775
revision_id = next(graph_iter)
776
except StopIteration:
778
except errors.RevisionNotPresent as e:
780
yield e.revision_id, None, None
666
for revision_id in repo.iter_reverse_revision_history(end_rev_id):
667
revno = branch.revision_id_to_dotted_revno(revision_id)
668
revno_str = '.'.join(str(n) for n in revno)
669
if not found_start and revision_id == start_rev_id:
670
yield revision_id, revno_str, 0
783
revno_str = _compute_revno_str(branch, revision_id)
784
if not found_start and revision_id == start_rev_id:
785
if not exclude_common_ancestry:
786
yield revision_id, revno_str, 0
790
yield revision_id, revno_str, 0
792
raise _StartNotLinearAncestor()
674
yield revision_id, revno_str, 0
677
raise _StartNotLinearAncestor()
795
680
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
836
721
yield rev_id, '.'.join(map(str, revno)), merge_depth
724
@deprecated_function(deprecated_in((2, 2, 0)))
725
def calculate_view_revisions(branch, start_revision, end_revision, direction,
726
specific_fileid, generate_merge_revisions):
727
"""Calculate the revisions to view.
729
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
730
a list of the same tuples.
732
start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
734
view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
735
direction, generate_merge_revisions or specific_fileid))
737
view_revisions = _filter_revisions_touching_file_id(branch,
738
specific_fileid, view_revisions,
739
include_merges=generate_merge_revisions)
740
return _rebase_merge_depth(view_revisions)
def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision have a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
-        min_depth = min([d for r,n,d in view_revisions])
+        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
-            view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
+            view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
    return view_revisions

    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
-    if type(view_revisions) == list:
+    if isinstance(view_revisions, list):
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
-        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
+        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
            for view in view_revisions:
    return log_rev_iterator
-def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
+def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
-    :param search: A user text search string.
+    :param match: A dictionary with properties as keys and lists of strings
+        as values. To match, a revision may match any of the supplied strings
+        within a single property but must match at least one string for each
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        return log_rev_iterator
-    searchRE = re_compile_checked(search, re.IGNORECASE,
-            'log message filter')
-    return _filter_message_re(searchRE, log_rev_iterator)
+    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
+                for k, v in match.items()]
+    return _filter_re(searchRE, log_rev_iterator)


-def _filter_message_re(searchRE, log_rev_iterator):
+def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
-        for (rev_id, revno, merge_depth), rev, delta in revs:
-            if searchRE.search(rev.message):
-                new_revs.append(((rev_id, revno, merge_depth), rev, delta))
+        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]


+def _match_filter(searchRE, rev):
+        'message': (rev.message,),
+        'committer': (rev.committer,),
+        'author': (rev.get_apparent_authors()),
+        'bugs': list(rev.iter_bugs())
+    strings[''] = [item for inner_list in strings.values()
+                   for item in inner_list]
+    for (k, v) in searchRE:
+        if k in strings and not _match_any_filter(strings[k], v):


+def _match_any_filter(strings, res):
+    return any(re.search(s) for re in res for s in strings)
819
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
933
820
fileids=None, direction='reverse'):
979
866
if delta_type == 'full' and not check_fileids:
980
867
deltas = repository.get_deltas_for_revisions(revisions)
981
for rev, delta in zip(revs, deltas):
868
for rev, delta in izip(revs, deltas):
982
869
new_revs.append((rev[0], rev[1], delta))
984
871
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
985
for rev, delta in zip(revs, deltas):
872
for rev, delta in izip(revs, deltas):
986
873
if check_fileids:
987
874
if delta is None or not delta.has_changed():
1036
923
for revs in log_rev_iterator:
1037
924
# r = revision_id, n = revno, d = merge depth
1038
925
revision_ids = [view[0] for view, _, _ in revs]
1039
revisions = dict(repository.iter_revisions(revision_ids))
1040
yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
926
revisions = repository.get_revisions(revision_ids)
927
revs = [(rev[0], revision, rev[2]) for rev, revision in
928
izip(revs, revisions)]
1043
932
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1077
968
:return: (start_rev_id, end_rev_id) tuple.
970
branch_revno, branch_rev_id = branch.last_revision_info()
1079
971
start_rev_id = None
1081
if start_revision is not None:
1082
if not isinstance(start_revision, revisionspec.RevisionInfo):
1083
raise TypeError(start_revision)
1084
start_rev_id = start_revision.rev_id
1085
start_revno = start_revision.revno
1086
if start_revno is None:
972
if start_revision is None:
975
if isinstance(start_revision, revisionspec.RevisionInfo):
976
start_rev_id = start_revision.rev_id
977
start_revno = start_revision.revno or 1
979
branch.check_real_revno(start_revision)
980
start_revno = start_revision
981
start_rev_id = branch.get_rev_id(start_revno)
1089
983
end_rev_id = None
1091
if end_revision is not None:
1092
if not isinstance(end_revision, revisionspec.RevisionInfo):
1093
raise TypeError(start_revision)
1094
end_rev_id = end_revision.rev_id
1095
end_revno = end_revision.revno
1096
if end_revno is None:
1098
end_revno = branch.revno()
1099
except errors.GhostRevisionsHaveNoRevno:
984
if end_revision is None:
985
end_revno = branch_revno
987
if isinstance(end_revision, revisionspec.RevisionInfo):
988
end_rev_id = end_revision.rev_id
989
end_revno = end_revision.revno or branch_revno
991
branch.check_real_revno(end_revision)
992
end_revno = end_revision
993
end_rev_id = branch.get_rev_id(end_revno)
1102
if branch.last_revision() != _mod_revision.NULL_REVISION:
995
if branch_revno != 0:
1103
996
if (start_rev_id == _mod_revision.NULL_REVISION
1104
997
or end_rev_id == _mod_revision.NULL_REVISION):
1105
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1106
if end_revno is not None and start_revno > end_revno:
1107
raise errors.BzrCommandError(gettext("Start revision must be "
1108
"older than the end revision."))
998
raise errors.BzrCommandError('Logging revision 0 is invalid.')
999
if start_revno > end_revno:
1000
raise errors.BzrCommandError("Start revision must be older than "
1001
"the end revision.")
1109
1002
return (start_rev_id, end_rev_id)
1161
1054
if ((start_rev_id == _mod_revision.NULL_REVISION)
1162
1055
or (end_rev_id == _mod_revision.NULL_REVISION)):
1163
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1056
raise errors.BzrCommandError('Logging revision 0 is invalid.')
1164
1057
if start_revno > end_revno:
1165
raise errors.BzrCommandError(gettext("Start revision must be older "
1166
"than the end revision."))
1058
raise errors.BzrCommandError("Start revision must be older than "
1059
"the end revision.")
1168
1061
if end_revno < start_revno:
1169
1062
return None, None, None, None
1170
1063
cur_revno = branch_revno
1172
1065
mainline_revs = []
1173
graph = branch.repository.get_graph()
1174
for revision_id in graph.iter_lefthand_ancestry(
1175
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1066
for revision_id in branch.repository.iter_reverse_revision_history(
1067
branch_last_revision):
1176
1068
if cur_revno < start_revno:
1177
1069
# We have gone far enough, but we always add 1 more revision
1178
1070
rev_nos[revision_id] = cur_revno
1192
1084
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1087
@deprecated_function(deprecated_in((2, 2, 0)))
1088
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
1089
"""Filter view_revisions based on revision ranges.
1091
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1092
tuples to be filtered.
1094
:param start_rev_id: If not NONE specifies the first revision to be logged.
1095
If NONE then all revisions up to the end_rev_id are logged.
1097
:param end_rev_id: If not NONE specifies the last revision to be logged.
1098
If NONE then all revisions up to the end of the log are logged.
1100
:return: The filtered view_revisions.
1102
if start_rev_id or end_rev_id:
1103
revision_ids = [r for r, n, d in view_revisions]
1105
start_index = revision_ids.index(start_rev_id)
1108
if start_rev_id == end_rev_id:
1109
end_index = start_index
1112
end_index = revision_ids.index(end_rev_id)
1114
end_index = len(view_revisions) - 1
1115
# To include the revisions merged into the last revision,
1116
# extend end_rev_id down to, but not including, the next rev
1117
# with the same or lesser merge_depth
1118
end_merge_depth = view_revisions[end_index][2]
1120
for index in xrange(end_index+1, len(view_revisions)+1):
1121
if view_revisions[index][2] <= end_merge_depth:
1122
end_index = index - 1
1125
# if the search falls off the end then log to the end as well
1126
end_index = len(view_revisions) - 1
1127
view_revisions = view_revisions[start_index:end_index+1]
1128
return view_revisions
1195
1131
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1196
1132
include_merges=True):
1197
1133
r"""Return the list of revision ids which touch a given file id.
1244
1177
# indexing layer. We might consider passing in hints as to the known
1245
1178
# access pattern (sparse/clustered, high success rate/low success
1246
1179
# rate). This particular access is clustered with a low success rate.
1180
get_parent_map = branch.repository.texts.get_parent_map
1247
1181
modified_text_revisions = set()
1248
1182
chunk_size = 1000
1249
for start in range(0, len(text_keys), chunk_size):
1183
for start in xrange(0, len(text_keys), chunk_size):
1250
1184
next_keys = text_keys[start:start + chunk_size]
1251
1185
# Only keep the revision_id portion of the key
1252
1186
modified_text_revisions.update(
1213
@deprecated_function(deprecated_in((2, 2, 0)))
1214
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
1215
include_merges=True):
1216
"""Produce an iterator of revisions to show
1217
:return: an iterator of (revision_id, revno, merge_depth)
1218
(if there is no revno for a revision, None is supplied)
1220
if not include_merges:
1221
revision_ids = mainline_revs[1:]
1222
if direction == 'reverse':
1223
revision_ids.reverse()
1224
for revision_id in revision_ids:
1225
yield revision_id, str(rev_nos[revision_id]), 0
1227
graph = branch.repository.get_graph()
1228
# This asks for all mainline revisions, which means we only have to spider
1229
# sideways, rather than depth history. That said, its still size-of-history
1230
# and should be addressed.
1231
# mainline_revisions always includes an extra revision at the beginning, so
1233
parent_map = dict(((key, value) for key, value in
1234
graph.iter_ancestry(mainline_revs[1:]) if value is not None))
1235
# filter out ghosts; merge_sort errors on ghosts.
1236
rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
1237
merge_sorted_revisions = tsort.merge_sort(
1241
generate_revno=True)
1243
if direction == 'forward':
1244
# forward means oldest first.
1245
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
1246
elif direction != 'reverse':
1247
raise ValueError('invalid direction %r' % direction)
1249
for (sequence, rev_id, merge_depth, revno, end_of_merge
1250
) in merge_sorted_revisions:
1251
yield rev_id, '.'.join(map(str, revno)), merge_depth
1279
1254
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1280
1255
"""Reverse revisions by depth.
1341
1312
to indicate which LogRevision attributes it supports:
1343
1314
- supports_delta must be True if this log formatter supports delta.
1344
Otherwise the delta attribute may not be populated. The 'delta_format'
1345
attribute describes whether the 'short_status' format (1) or the long
1346
one (2) should be used.
1315
Otherwise the delta attribute may not be populated. The 'delta_format'
1316
attribute describes whether the 'short_status' format (1) or the long
1317
one (2) should be used.
1348
1319
- supports_merge_revisions must be True if this log formatter supports
1349
merge revisions. If not, then only mainline revisions will be passed
1320
merge revisions. If not, then only mainline revisions will be passed
1352
1323
- preferred_levels is the number of levels this formatter defaults to.
1353
The default value is zero meaning display all levels.
1354
This value is only relevant if supports_merge_revisions is True.
1324
The default value is zero meaning display all levels.
1325
This value is only relevant if supports_merge_revisions is True.
1356
1327
- supports_tags must be True if this log formatter supports tags.
1357
Otherwise the tags attribute may not be populated.
1328
Otherwise the tags attribute may not be populated.
1359
1330
- supports_diff must be True if this log formatter supports diffs.
1360
Otherwise the diff attribute may not be populated.
1362
- supports_signatures must be True if this log formatter supports GPG
1331
Otherwise the diff attribute may not be populated.
1365
1333
Plugins can register functions to show custom revision properties using
1366
1334
the properties_handler_registry. The registered function
1367
must respect the following interface description::
1335
must respect the following interface description:
1369
1336
def my_show_properties(properties_dict):
1370
1337
# code that returns a dict {'name':'value'} of the properties
    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
-                 to_exact_file=None):
+                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use

    def short_author(self, rev):
-        name, address = config.parse_username(rev.get_apparent_authors()[0])
+        return self.authors(rev, 'first', short=True, sep=', ')

+    def authors(self, rev, who, short=False, sep=None):
+        """Generate list of authors, taking --authors option into account.
+
+        The caller has to specify the name of a author list handler,
+        as provided by the author list registry, using the ``who``
+        argument. That name only sets a default, though: when the
+        user selected a different author list generation using the
+        ``--authors`` command line switch, as represented by the
+        ``author_list_handler`` constructor argument, that value takes
+
+        :param rev: The revision for which to generate the list of authors.
+        :param who: Name of the default handler.
+        :param short: Whether to shorten names to either name or address.
+        :param sep: What separator to use for automatic concatenation.
+        if self._author_list_handler is not None:
+            # The user did specify --authors, which overrides the default
+            author_list_handler = self._author_list_handler
+            # The user didn't specify --authors, so we use the caller's default
+            author_list_handler = author_list_registry.get(who)
+        names = author_list_handler(rev)
+        for i in range(len(names)):
+            name, address = config.parse_username(names[i])
+            names = sep.join(names)
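
# Hedged sketch (author_list_registry and --authors are real; the handler below
# is illustrative): plugins can register their own author-list handler, which a
# user can then select with --authors=domains.
from breezy import config

def author_list_domains(rev):
    # Keep only the domain of each author's e-mail address.
    return [config.parse_username(a)[1].split('@')[-1]
            for a in rev.get_apparent_authors()]

author_list_registry.register('domains', author_list_domains,
                              'Author e-mail domains only')
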
1487
1420
def merge_marker(self, revision):
1488
1421
"""Get the merge marker to include in the output or '' if none."""
1584
1516
self.merge_marker(revision)))
1585
1517
if revision.tags:
1586
1518
lines.append('tags: %s' % (', '.join(revision.tags)))
1587
if self.show_ids or revision.revno is None:
1588
1520
lines.append('revision-id: %s' % (revision.rev.revision_id,))
1590
1521
for parent_id in revision.rev.parent_ids:
1591
1522
lines.append('parent: %s' % (parent_id,))
1592
1523
lines.extend(self.custom_properties(revision.rev))
1594
1525
committer = revision.rev.committer
1595
authors = self.authors(revision.rev, 'all')
1526
authors = revision.rev.get_apparent_authors()
1596
1527
if authors != [committer]:
1597
1528
lines.append('author: %s' % (", ".join(authors),))
1598
1529
lines.append('committer: %s' % (committer,))
1619
1547
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1620
1548
if revision.delta is not None:
1621
1549
# Use the standard status output to display changes
1622
from breezy.delta import report_delta
1623
report_delta(to_file, revision.delta, short_status=False,
1550
from bzrlib.delta import report_delta
1551
report_delta(to_file, revision.delta, short_status=False,
1624
1552
show_ids=self.show_ids, indent=indent)
1625
1553
if revision.diff is not None:
1626
1554
to_file.write(indent + 'diff:\n')
1672
1600
if revision.tags:
1673
1601
tags = ' {%s}' % (', '.join(revision.tags))
1674
1602
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1675
revision.revno or "", self.short_author(revision.rev),
1603
revision.revno, self.short_author(revision.rev),
1676
1604
format_date(revision.rev.timestamp,
1677
1605
revision.rev.timezone or 0,
1678
1606
self.show_timezone, date_fmt="%Y-%m-%d",
1679
1607
show_offset=False),
1680
1608
tags, self.merge_marker(revision)))
1681
1609
self.show_properties(revision.rev, indent+offset)
1682
if self.show_ids or revision.revno is None:
1683
1611
to_file.write(indent + offset + 'revision-id:%s\n'
1684
1612
% (revision.rev.revision_id,))
1685
1613
if not revision.rev.message:
1692
1620
if revision.delta is not None:
1693
1621
# Use the standard status output to display changes
1694
from breezy.delta import report_delta
1695
report_delta(to_file, revision.delta,
1696
short_status=self.delta_format==1,
1622
from bzrlib.delta import report_delta
1623
report_delta(to_file, revision.delta,
1624
short_status=self.delta_format==1,
1697
1625
show_ids=self.show_ids, indent=indent + offset)
1698
1626
if revision.diff is not None:
1699
1627
self.show_diff(self.to_exact_file, revision.diff, ' ')
1739
1667
def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
1740
1668
"""Format log info into one string. Truncate tail of string
1742
:param revno: revision number or None.
1743
Revision numbers counts from 1.
1744
:param rev: revision object
1745
:param max_chars: maximum length of resulting string
1746
:param tags: list of tags or None
1747
:param prefix: string to prefix each line
1748
:return: formatted truncated string
1669
:param revno: revision number or None.
1670
Revision numbers counts from 1.
1671
:param rev: revision object
1672
:param max_chars: maximum length of resulting string
1673
:param tags: list of tags or None
1674
:param prefix: string to prefix each line
1675
:return: formatted truncated string
1752
1679
# show revno only when is not None
1753
1680
out.append("%s:" % revno)
1754
if max_chars is not None:
1755
out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
1757
out.append(self.short_author(rev))
1681
out.append(self.truncate(self.short_author(rev), 20))
1758
1682
out.append(self.date_string(rev))
1759
1683
if len(rev.parent_ids) > 1:
1760
1684
out.append('[merge]')
1779
1703
self.show_timezone,
1780
1704
date_fmt='%Y-%m-%d',
1781
1705
show_offset=False)
-        committer_str = revision.rev.get_apparent_authors()[0].replace (' <', '  <')
-        to_file.write('%s %s\n\n' % (date_str,committer_str))
+        committer_str = self.authors(revision.rev, 'first', sep=', ')
+        committer_str = committer_str.replace(' <', '  <')
+        to_file.write('%s %s\n\n' % (date_str, committer_str))
1786
1709
if revision.delta is not None and revision.delta.has_changed():
1787
1710
for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
1789
1712
to_file.write('\t* %s:\n' % (path,))
1790
1713
for c in revision.delta.renamed:
1791
oldpath, newpath = c[:2]
1714
oldpath,newpath = c[:2]
1792
1715
# For renamed files, show both the old and the new path
1793
to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
1716
to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))
1794
1717
to_file.write('\n')
1796
1719
if not revision.rev.message:
1819
1742
return self.get(name)(*args, **kwargs)
1821
1744
def get_default(self, branch):
1822
c = branch.get_config_stack()
1823
return self.get(c.get('log_format'))
1745
return self.get(branch.get_config().log_format())
1826
1748
log_formatter_registry = LogFormatterRegistry()
1829
1751
log_formatter_registry.register('short', ShortLogFormatter,
1830
'Moderately short log format.')
1752
'Moderately short log format')
1831
1753
log_formatter_registry.register('long', LongLogFormatter,
1832
'Detailed log format.')
1754
'Detailed log format')
1833
1755
log_formatter_registry.register('line', LineLogFormatter,
1834
'Log format with one line per revision.')
1756
'Log format with one line per revision')
1835
1757
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
1836
'Format used by GNU ChangeLog files.')
1758
'Format used by GNU ChangeLog files')
1839
1761
def register_formatter(name, formatter):
1850
1772
return log_formatter_registry.make_formatter(name, *args, **kwargs)
1851
1773
except KeyError:
1852
raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)
1855
def author_list_all(rev):
1856
return rev.get_apparent_authors()[:]
1859
def author_list_first(rev):
1860
lst = rev.get_apparent_authors()
1867
def author_list_committer(rev):
1868
return [rev.committer]
1871
author_list_registry = registry.Registry()
1873
author_list_registry.register('all', author_list_all,
1876
author_list_registry.register('first', author_list_first,
1879
author_list_registry.register('committer', author_list_committer,
1774
raise errors.BzrCommandError("unknown log formatter: %r" % name)
1777
def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
1778
# deprecated; for compatibility
1779
lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
1780
lf.show(revno, rev, delta)
1883
1783
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
2012
1912
output.write('Added Revisions:\n')
2013
1913
start_revno = new_revno - len(new_history) + 1
2014
1914
show_log(branch, lf, None, verbose=False, direction='forward',
2015
start_revision=start_revno)
1915
start_revision=start_revno,)
2018
1918
def show_flat_log(repository, history, last_revno, lf):
2040
1940
:param file_list: the list of paths given on the command line;
2041
1941
the first of these can be a branch location or a file path,
2042
1942
the remainder must be file paths
2043
:param add_cleanup: When the branch returned is read locked,
2044
an unlock call will be queued to the cleanup.
2045
1943
:return: (branch, info_list, start_rev_info, end_rev_info) where
2046
1944
info_list is a list of (relative_path, file_id, kind) tuples where
2047
1945
kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
2048
1946
branch will be read-locked.
2050
from breezy.builtins import _get_revision_range
2051
tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
2053
add_cleanup(b.lock_read().unlock)
1948
from builtins import _get_revision_range, safe_relpath_files
1949
tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
2054
1951
# XXX: It's damn messy converting a list of paths to relative paths when
2055
1952
# those paths might be deleted ones, they might be on a case-insensitive
2056
1953
# filesystem and/or they might be in silly locations (like another branch).
2121
2018
tree1 = b.repository.revision_tree(rev_id)
2122
2019
file_id = tree1.path2id(fp)
2123
kind = _get_kind_for_file_id(tree1, fp, file_id)
2020
kind = _get_kind_for_file_id(tree1, file_id)
2124
2021
info_list.append((fp, file_id, kind))
2125
2022
return b, info_list, start_rev_info, end_rev_info
-def _get_kind_for_file_id(tree, file_id):
+def _get_kind_for_file_id(tree, path, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
-        return tree.kind(file_id)
+        return tree.kind(path, file_id)


# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
-    if revision.properties.has_key('bugs'):
+    if 'bugs' in revision.properties:
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']
        if fixed_bug_urls:
-            return {'fixes bug(s)': ' '.join(fixed_bug_urls)}
+            return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
+                    ' '.join(fixed_bug_urls)}

properties_handler_registry.register('bugs_properties_handler',