101
75
TODO: Perhaps some way to limit this to only particular revisions,
102
76
or to traverse a non-mainline set of revisions?
104
last_verifier = last_tree.get_file_verifier(last_path)
105
graph = repository.get_graph()
106
history = list(graph.iter_lefthand_ancestry(last_revision, []))
108
for revision_id in history:
109
this_tree = repository.revision_tree(revision_id)
110
this_intertree = InterTree.get(this_tree, last_tree)
111
this_path = this_intertree.find_source_path(last_path)
81
for revision_id in branch.revision_history():
82
this_inv = branch.repository.get_revision_inventory(revision_id)
83
if file_id in this_inv:
84
this_ie = this_inv[file_id]
85
this_path = this_inv.id2path(file_id)
87
this_ie = this_path = None
113
89
# now we know how it was last time, and how it is in this revision.
114
90
# are those two states effectively the same or not?
115
if this_path is not None and last_path is None:
116
yield revno, revision_id, "deleted " + this_path
117
this_verifier = this_tree.get_file_verifier(this_path)
118
elif this_path is None and last_path is not None:
119
yield revno, revision_id, "added " + last_path
92
if not this_ie and not last_ie:
93
# not present in either
95
elif this_ie and not last_ie:
96
yield revno, revision_id, "added " + this_path
97
elif not this_ie and last_ie:
99
yield revno, revision_id, "deleted " + last_path
120
100
elif this_path != last_path:
121
yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
122
this_verifier = this_tree.get_file_verifier(this_path)
124
this_verifier = this_tree.get_file_verifier(this_path)
125
if (this_verifier != last_verifier):
126
yield revno, revision_id, "modified " + this_path
101
yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
102
elif (this_ie.text_size != last_ie.text_size
103
or this_ie.text_sha1 != last_ie.text_sha1):
104
yield revno, revision_id, "modified " + this_path
128
last_verifier = this_verifier
129
107
last_path = this_path
130
last_tree = this_tree
131
if last_path is None:
112
def _enumerate_history(branch):
115
for rev_id in branch.revision_history():
116
rh.append((revno, rev_id))
136
121
def show_log(branch,
123
specific_fileid=None,
139
125
direction='reverse',
140
126
start_revision=None,
141
127
end_revision=None,
145
129
"""Write out human-readable log of commits to this branch.
147
This function is being retained for backwards compatibility but
148
should not be extended with new parameters. Use the new Logger class
149
instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
150
make_log_request_dict function.
152
:param lf: The LogFormatter object showing the output.
154
:param verbose: If True show added/changed/deleted/renamed files.
156
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
159
:param start_revision: If not None, only show revisions >= start_revision
161
:param end_revision: If not None, only show revisions <= end_revision
163
:param limit: If set, shows only 'limit' revisions, all revisions are shown
166
:param show_diff: If True, output a diff after each revision.
168
:param match: Dictionary of search lists to use when matching revision
180
if isinstance(start_revision, int):
182
start_revision = revisionspec.RevisionInfo(branch, start_revision)
183
except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
184
raise errors.InvalidRevisionNumber(start_revision)
186
if isinstance(end_revision, int):
188
end_revision = revisionspec.RevisionInfo(branch, end_revision)
189
except (errors.NoSuchRevision, errors.RevnoOutOfBounds):
190
raise errors.InvalidRevisionNumber(end_revision)
192
if end_revision is not None and end_revision.revno == 0:
193
raise errors.InvalidRevisionNumber(end_revision.revno)
195
# Build the request and execute it
196
rqst = make_log_request_dict(
198
start_revision=start_revision, end_revision=end_revision,
199
limit=limit, delta_type=delta_type, diff_type=diff_type)
200
Logger(branch, rqst).show(lf)
203
# Note: This needs to be kept in sync with the defaults in
204
# make_log_request_dict() below
205
_DEFAULT_REQUEST_PARAMS = {
206
'direction': 'reverse',
208
'generate_tags': True,
209
'exclude_common_ancestry': False,
210
'_match_using_deltas': True,
214
def make_log_request_dict(direction='reverse', specific_files=None,
215
start_revision=None, end_revision=None, limit=None,
216
message_search=None, levels=None, generate_tags=True,
218
diff_type=None, _match_using_deltas=True,
219
exclude_common_ancestry=False, match=None,
220
signature=False, omit_merges=False,
222
"""Convenience function for making a logging request dictionary.
224
Using this function may make code slightly safer by ensuring
225
parameters have the correct names. It also provides a reference
226
point for documenting the supported parameters.
228
:param direction: 'reverse' (default) is latest to earliest;
229
'forward' is earliest to latest.
231
:param specific_files: If not None, only include revisions
232
affecting the specified files, rather than all revisions.
234
:param start_revision: If not None, only generate
235
revisions >= start_revision
237
:param end_revision: If not None, only generate
238
revisions <= end_revision
240
:param limit: If set, generate only 'limit' revisions, all revisions
241
are shown if None or 0.
243
:param message_search: If not None, only include revisions with
244
matching commit messages
246
:param levels: the number of levels of revisions to
247
generate; 1 for just the mainline; 0 for all levels, or None for
250
:param generate_tags: If True, include tags for matched revisions.
252
:param delta_type: Either 'full', 'partial' or None.
253
'full' means generate the complete delta - adds/deletes/modifies/etc;
254
'partial' means filter the delta using specific_files;
255
None means do not generate any delta.
257
:param diff_type: Either 'full', 'partial' or None.
258
'full' means generate the complete diff - adds/deletes/modifies/etc;
259
'partial' means filter the diff using specific_files;
260
None means do not generate any diff.
262
:param _match_using_deltas: a private parameter controlling the
263
algorithm used for matching specific_files. This parameter
264
may be removed in the future so breezy client code should NOT
267
:param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
268
range operator or as a graph difference.
270
:param signature: show digital signature information
272
:param match: Dictionary of list of search strings to use when filtering
273
revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
274
the empty string to match any of the preceding properties.
276
:param omit_merges: If True, commits with more than one parent are
280
# Take care of old style message_search parameter
283
if 'message' in match:
284
match['message'].append(message_search)
286
match['message'] = [message_search]
288
match = {'message': [message_search]}
290
'direction': direction,
291
'specific_files': specific_files,
292
'start_revision': start_revision,
293
'end_revision': end_revision,
296
'generate_tags': generate_tags,
297
'delta_type': delta_type,
298
'diff_type': diff_type,
299
'exclude_common_ancestry': exclude_common_ancestry,
300
'signature': signature,
302
'omit_merges': omit_merges,
303
# Add 'private' attributes for features that may be deprecated
304
'_match_using_deltas': _match_using_deltas,
308
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary.

    :param rqst: a dictionary of explicitly-set parameters, or None
    :return: a new dictionary containing _DEFAULT_REQUEST_PARAMS overlaid
        with the entries of rqst (rqst wins on conflicts)
    """
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result
316
def format_signature_validity(rev_id, branch):
    """Get the signature validity of a revision.

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        # Signed, but with a key we don't have: report the key id.
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
337
class LogGenerator(object):
    """A generator of log revisions.

    Abstract base class: subclasses must implement iter_log_revisions().
    """

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
348
class Logger(object):
349
"""An object that generates, formats and displays a log."""
351
def __init__(self, branch, rqst):
354
:param branch: the branch to log
355
:param rqst: A dictionary specifying the query parameters.
356
See make_log_request_dict() for supported values.
359
self.rqst = _apply_log_request_defaults(rqst)
364
:param lf: The LogFormatter object to send the output to.
366
if not isinstance(lf, LogFormatter):
367
warn("not a LogFormatter instance: %r" % lf)
369
with self.branch.lock_read():
370
if getattr(lf, 'begin_log', None):
373
if getattr(lf, 'end_log', None):
376
def _show_body(self, lf):
377
"""Show the main log output.
379
Subclasses may wish to override this.
381
# Tweak the LogRequest based on what the LogFormatter can handle.
382
# (There's no point generating stuff if the formatter can't display it.)
384
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
385
# user didn't specify levels, use whatever the LF can handle:
386
rqst['levels'] = lf.get_levels()
388
if not getattr(lf, 'supports_tags', False):
389
rqst['generate_tags'] = False
390
if not getattr(lf, 'supports_delta', False):
391
rqst['delta_type'] = None
392
if not getattr(lf, 'supports_diff', False):
393
rqst['diff_type'] = None
394
if not getattr(lf, 'supports_signatures', False):
395
rqst['signature'] = False
397
# Find and print the interesting revisions
398
generator = self._generator_factory(self.branch, rqst)
400
for lr in generator.iter_log_revisions():
402
except errors.GhostRevisionUnusableHere:
403
raise errors.CommandError(
404
gettext('Further revision history missing.'))
407
def _generator_factory(self, branch, rqst):
408
"""Make the LogGenerator object to use.
410
Subclasses may wish to override this.
412
return _DefaultLogGenerator(branch, **rqst)
415
def _log_revision_iterator_using_per_file_graph(
416
branch, delta_type, match, levels, path, start_rev_id, end_rev_id,
417
direction, exclude_common_ancestry):
418
# Get the base revisions, filtering by the revision range.
419
# Note that we always generate the merge revisions because
420
# filter_revisions_touching_path() requires them ...
421
view_revisions = _calc_view_revisions(
422
branch, start_rev_id, end_rev_id,
423
direction, generate_merge_revisions=True,
424
exclude_common_ancestry=exclude_common_ancestry)
425
if not isinstance(view_revisions, list):
426
view_revisions = list(view_revisions)
427
view_revisions = _filter_revisions_touching_path(
428
branch, path, view_revisions,
429
include_merges=levels != 1)
430
return make_log_rev_iterator(
431
branch, view_revisions, delta_type, match)
434
def _log_revision_iterator_using_delta_matching(
435
branch, delta_type, match, levels, specific_files, start_rev_id, end_rev_id,
436
direction, exclude_common_ancestry, limit):
437
# Get the base revisions, filtering by the revision range
438
generate_merge_revisions = levels != 1
439
delayed_graph_generation = not specific_files and (
440
limit or start_rev_id or end_rev_id)
441
view_revisions = _calc_view_revisions(
442
branch, start_rev_id, end_rev_id,
444
generate_merge_revisions=generate_merge_revisions,
445
delayed_graph_generation=delayed_graph_generation,
446
exclude_common_ancestry=exclude_common_ancestry)
448
# Apply the other filters
449
return make_log_rev_iterator(branch, view_revisions,
451
files=specific_files,
455
def _format_diff(branch, rev, diff_type, files=None):
458
:param branch: Branch object
459
:param rev: Revision object
460
:param diff_type: Type of diff to generate
461
:param files: List of files to generate diff for (or None for all)
463
repo = branch.repository
464
if len(rev.parent_ids) == 0:
465
ancestor_id = _mod_revision.NULL_REVISION
467
ancestor_id = rev.parent_ids[0]
468
tree_1 = repo.revision_tree(ancestor_id)
469
tree_2 = repo.revision_tree(rev.revision_id)
470
if diff_type == 'partial' and files is not None:
471
specific_files = files
473
specific_files = None
475
path_encoding = get_diff_header_encoding()
476
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
477
new_label='', path_encoding=path_encoding)
481
class _StartNotLinearAncestor(Exception):
482
"""Raised when a start revision is not found walking left-hand history."""
485
class _DefaultLogGenerator(LogGenerator):
486
"""The default generator of log revisions."""
489
self, branch, levels=None, limit=None, diff_type=None,
490
delta_type=None, show_signature=None, omit_merges=None,
491
generate_tags=None, specific_files=None, match=None,
492
start_revision=None, end_revision=None, direction=None,
493
exclude_common_ancestry=None, _match_using_deltas=None,
498
self.diff_type = diff_type
499
self.delta_type = delta_type
500
self.show_signature = signature
501
self.omit_merges = omit_merges
502
self.specific_files = specific_files
504
self.start_revision = start_revision
505
self.end_revision = end_revision
506
self.direction = direction
507
self.exclude_common_ancestry = exclude_common_ancestry
508
self._match_using_deltas = _match_using_deltas
509
if generate_tags and branch.supports_tags():
510
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
512
self.rev_tag_dict = {}
514
def iter_log_revisions(self):
515
"""Iterate over LogRevision objects.
517
:return: An iterator yielding LogRevision objects.
520
revision_iterator = self._create_log_revision_iterator()
521
for revs in revision_iterator:
522
for (rev_id, revno, merge_depth), rev, delta in revs:
523
# 0 levels means show everything; merge_depth counts from 0
524
if (self.levels != 0 and merge_depth is not None and
525
merge_depth >= self.levels):
527
if self.omit_merges and len(rev.parent_ids) > 1:
530
raise errors.GhostRevisionUnusableHere(rev_id)
531
if self.diff_type is None:
535
self.branch, rev, self.diff_type,
537
if self.show_signature:
538
signature = format_signature_validity(rev_id, self.branch)
542
rev, revno, merge_depth, delta,
543
self.rev_tag_dict.get(rev_id), diff, signature)
546
if log_count >= self.limit:
549
def _create_log_revision_iterator(self):
550
"""Create a revision iterator for log.
552
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
555
start_rev_id, end_rev_id = _get_revision_limits(
556
self.branch, self.start_revision, self.end_revision)
557
if self._match_using_deltas:
558
return _log_revision_iterator_using_delta_matching(
560
delta_type=self.delta_type,
563
specific_files=self.specific_files,
564
start_rev_id=start_rev_id, end_rev_id=end_rev_id,
565
direction=self.direction,
566
exclude_common_ancestry=self.exclude_common_ancestry,
569
# We're using the per-file-graph algorithm. This scales really
570
# well but only makes sense if there is a single file and it's
572
file_count = len(self.specific_files)
574
raise errors.BzrError(
575
"illegal LogRequest: must match-using-deltas "
576
"when logging %d files" % file_count)
577
return _log_revision_iterator_using_per_file_graph(
579
delta_type=self.delta_type,
582
path=self.specific_files[0],
583
start_rev_id=start_rev_id, end_rev_id=end_rev_id,
584
direction=self.direction,
585
exclude_common_ancestry=self.exclude_common_ancestry
589
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
590
generate_merge_revisions,
591
delayed_graph_generation=False,
592
exclude_common_ancestry=False,
594
"""Calculate the revisions to view.
596
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
597
a list of the same tuples.
599
if (exclude_common_ancestry and start_rev_id == end_rev_id):
600
raise errors.CommandError(gettext(
601
'--exclude-common-ancestry requires two different revisions'))
602
if direction not in ('reverse', 'forward'):
603
raise ValueError(gettext('invalid direction %r') % direction)
604
br_rev_id = branch.last_revision()
605
if br_rev_id == _mod_revision.NULL_REVISION:
608
if (end_rev_id and start_rev_id == end_rev_id
609
and (not generate_merge_revisions
610
or not _has_merges(branch, end_rev_id))):
611
# If a single revision is requested, check we can handle it
612
return _generate_one_revision(branch, end_rev_id, br_rev_id,
614
if not generate_merge_revisions:
616
# If we only want to see linear revisions, we can iterate ...
617
iter_revs = _linear_view_revisions(
618
branch, start_rev_id, end_rev_id,
619
exclude_common_ancestry=exclude_common_ancestry)
620
# If a start limit was given and it's not obviously an
621
# ancestor of the end limit, check it before outputting anything
622
if (direction == 'forward'
623
or (start_rev_id and not _is_obvious_ancestor(
624
branch, start_rev_id, end_rev_id))):
625
iter_revs = list(iter_revs)
626
if direction == 'forward':
627
iter_revs = reversed(iter_revs)
629
except _StartNotLinearAncestor:
630
# Switch to the slower implementation that may be able to find a
631
# non-obvious ancestor out of the left-hand history.
633
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
634
direction, delayed_graph_generation,
635
exclude_common_ancestry)
636
if direction == 'forward':
637
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
641
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
642
if rev_id == br_rev_id:
644
return [(br_rev_id, br_revno, 0)]
646
revno_str = _compute_revno_str(branch, rev_id)
647
return [(rev_id, revno_str, 0)]
650
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
651
delayed_graph_generation,
652
exclude_common_ancestry=False):
653
# On large trees, generating the merge graph can take 30-60 seconds
654
# so we delay doing it until a merge is detected, incrementally
655
# returning initial (non-merge) revisions while we can.
657
# The above is only true for old formats (<= 0.92), for newer formats, a
658
# couple of seconds only should be needed to load the whole graph and the
659
# other graph operations needed are even faster than that -- vila 100201
660
initial_revisions = []
661
if delayed_graph_generation:
663
for rev_id, revno, depth in _linear_view_revisions(
664
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
665
if _has_merges(branch, rev_id):
666
# The end_rev_id can be nested down somewhere. We need an
667
# explicit ancestry check. There is an ambiguity here as we
668
# may not raise _StartNotLinearAncestor for a revision that
669
# is an ancestor but not a *linear* one. But since we have
670
# loaded the graph to do the check (or calculate a dotted
671
# revno), we may as well accept to show the log... We need
672
# the check only if start_rev_id is not None as all
673
# revisions have _mod_revision.NULL_REVISION as an ancestor
675
graph = branch.repository.get_graph()
676
if (start_rev_id is not None
677
and not graph.is_ancestor(start_rev_id, end_rev_id)):
678
raise _StartNotLinearAncestor()
679
# Since we collected the revisions so far, we need to
684
initial_revisions.append((rev_id, revno, depth))
686
# No merged revisions found
687
return initial_revisions
688
except _StartNotLinearAncestor:
689
# A merge was never detected so the lower revision limit can't
690
# be nested down somewhere
691
raise errors.CommandError(gettext('Start revision not found in'
692
' history of end revision.'))
694
# We exit the loop above because we encounter a revision with merges, from
695
# this revision, we need to switch to _graph_view_revisions.
697
# A log including nested merges is required. If the direction is reverse,
698
# we rebase the initial merge depths so that the development line is
699
# shown naturally, i.e. just like it is for linear logging. We can easily
700
# make forward the exact opposite display, but showing the merge revisions
701
# indented at the end seems slightly nicer in that case.
702
view_revisions = itertools.chain(iter(initial_revisions),
703
_graph_view_revisions(branch, start_rev_id, end_rev_id,
704
rebase_initial_depths=(
705
direction == 'reverse'),
706
exclude_common_ancestry=exclude_common_ancestry))
707
return view_revisions
710
def _has_merges(branch, rev_id):
711
"""Does a revision have multiple parents or not?"""
712
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
713
return len(parents) > 1
716
def _compute_revno_str(branch, rev_id):
717
"""Compute the revno string from a rev_id.
719
:return: The revno string, or None if the revision is not in the supplied
132
LogFormatter object to show the output.
135
If true, list only the commits affecting the specified
136
file, rather than all commits.
139
If true show added/changed/deleted/renamed files.
142
'reverse' (default) is latest to earliest;
143
'forward' is earliest to latest.
146
If not None, only show revisions >= start_revision
149
If not None, only show revisions <= end_revision
723
revno = branch.revision_id_to_dotted_revno(rev_id)
724
except errors.NoSuchRevision:
725
# The revision must be outside of this branch
728
return '.'.join(str(n) for n in revno)
731
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
732
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
733
if start_rev_id and end_rev_id:
735
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
736
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
737
except errors.NoSuchRevision:
738
# one or both is not in the branch; not obvious
740
if len(start_dotted) == 1 and len(end_dotted) == 1:
742
return start_dotted[0] <= end_dotted[0]
743
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
744
start_dotted[0:1] == end_dotted[0:1]):
745
# both on same development line
746
return start_dotted[2] <= end_dotted[2]
750
# if either start or end is not specified then we use either the first or
751
# the last revision and *they* are obvious ancestors.
755
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
756
exclude_common_ancestry=False):
757
"""Calculate a sequence of revisions to view, newest to oldest.
759
:param start_rev_id: the lower revision-id
760
:param end_rev_id: the upper revision-id
761
:param exclude_common_ancestry: Whether the start_rev_id should be part of
762
the iterated revisions.
763
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
764
dotted_revno will be None for ghosts
765
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
766
is not found walking the left-hand history
768
repo = branch.repository
769
graph = repo.get_graph()
770
if start_rev_id is None and end_rev_id is None:
771
if branch._format.stores_revno() or \
772
config.GlobalStack().get('calculate_revnos'):
774
br_revno, br_rev_id = branch.last_revision_info()
775
except errors.GhostRevisionsHaveNoRevno:
776
br_rev_id = branch.last_revision()
781
br_rev_id = branch.last_revision()
784
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
785
(_mod_revision.NULL_REVISION,))
788
revision_id = next(graph_iter)
789
except errors.RevisionNotPresent as e:
791
yield e.revision_id, None, None
793
except StopIteration:
796
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
797
if cur_revno is not None:
800
br_rev_id = branch.last_revision()
801
if end_rev_id is None:
802
end_rev_id = br_rev_id
803
found_start = start_rev_id is None
804
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
805
(_mod_revision.NULL_REVISION,))
808
revision_id = next(graph_iter)
809
except StopIteration:
811
except errors.RevisionNotPresent as e:
813
yield e.revision_id, None, None
816
revno_str = _compute_revno_str(branch, revision_id)
817
if not found_start and revision_id == start_rev_id:
818
if not exclude_common_ancestry:
819
yield revision_id, revno_str, 0
823
yield revision_id, revno_str, 0
825
raise _StartNotLinearAncestor()
828
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
829
rebase_initial_depths=True,
830
exclude_common_ancestry=False):
831
"""Calculate revisions to view including merges, newest to oldest.
833
:param branch: the branch
834
:param start_rev_id: the lower revision-id
835
:param end_rev_id: the upper revision-id
836
:param rebase_initial_depth: should depths be rebased until a mainline
838
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
840
if exclude_common_ancestry:
841
stop_rule = 'with-merges-without-common-ancestry'
843
stop_rule = 'with-merges'
844
view_revisions = branch.iter_merge_sorted_revisions(
845
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
847
if not rebase_initial_depths:
848
for (rev_id, merge_depth, revno, end_of_merge
850
yield rev_id, '.'.join(map(str, revno)), merge_depth
852
# We're following a development line starting at a merged revision.
853
# We need to adjust depths down by the initial depth until we find
854
# a depth less than it. Then we use that depth as the adjustment.
855
# If and when we reach the mainline, depth adjustment ends.
856
depth_adjustment = None
857
for (rev_id, merge_depth, revno, end_of_merge
859
if depth_adjustment is None:
860
depth_adjustment = merge_depth
862
if merge_depth < depth_adjustment:
863
# From now on we reduce the depth adjustement, this can be
864
# surprising for users. The alternative requires two passes
865
# which breaks the fast display of the first revision
867
depth_adjustment = merge_depth
868
merge_depth -= depth_adjustment
869
yield rev_id, '.'.join(map(str, revno)), merge_depth
872
def _rebase_merge_depth(view_revisions):
873
"""Adjust depths upwards so the top level is 0."""
874
# If either the first or last revision have a merge_depth of 0, we're done
875
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
876
min_depth = min([d for r, n, d in view_revisions])
878
view_revisions = [(r, n, d - min_depth)
879
for r, n, d in view_revisions]
880
return view_revisions
883
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
884
files=None, direction='reverse'):
885
"""Create a revision iterator for log.
887
:param branch: The branch being logged.
888
:param view_revisions: The revisions being viewed.
889
:param generate_delta: Whether to generate a delta for each revision.
890
Permitted values are None, 'full' and 'partial'.
891
:param search: A user text search string.
892
:param files: If non empty, only revisions matching one or more of
893
the files are to be kept.
894
:param direction: the direction in which view_revisions is sorted
895
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
898
# Convert view_revisions into (view, None, None) groups to fit with
899
# the standard interface here.
900
if isinstance(view_revisions, list):
901
# A single batch conversion is faster than many incremental ones.
902
# As we have all the data, do a batch conversion.
903
nones = [None] * len(view_revisions)
904
log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
907
for view in view_revisions:
908
yield (view, None, None)
909
log_rev_iterator = iter([_convert()])
910
for adapter in log_adapters:
911
# It would be nicer if log adapters were first class objects
912
# with custom parameters. This will do for now. IGC 20090127
913
if adapter == _make_delta_filter:
914
log_rev_iterator = adapter(
915
branch, generate_delta, search, log_rev_iterator, files,
918
log_rev_iterator = adapter(
919
branch, generate_delta, search, log_rev_iterator)
920
return log_rev_iterator
923
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
924
"""Create a filtered iterator of log_rev_iterator matching on a regex.
926
:param branch: The branch being logged.
927
:param generate_delta: Whether to generate a delta for each revision.
928
:param match: A dictionary with properties as keys and lists of strings
929
as values. To match, a revision may match any of the supplied strings
930
within a single property but must match at least one string for each
932
:param log_rev_iterator: An input iterator containing all revisions that
933
could be displayed, in lists.
934
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
938
return log_rev_iterator
939
# Use lazy_compile so mapping to InvalidPattern error occurs.
940
searchRE = [(k, [lazy_regex.lazy_compile(x, re.IGNORECASE) for x in v])
941
for k, v in match.items()]
942
return _filter_re(searchRE, log_rev_iterator)
945
def _filter_re(searchRE, log_rev_iterator):
946
for revs in log_rev_iterator:
947
new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
952
def _match_filter(searchRE, rev):
954
'message': (rev.message,),
955
'committer': (rev.committer,),
956
'author': (rev.get_apparent_authors()),
957
'bugs': list(rev.iter_bugs())
959
strings[''] = [item for inner_list in strings.values()
960
for item in inner_list]
961
for k, v in searchRE:
962
if k in strings and not _match_any_filter(strings[k], v):
967
def _match_any_filter(strings, res):
968
return any(r.search(s) for r in res for s in strings)
971
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
972
files=None, direction='reverse'):
973
"""Add revision deltas to a log iterator if needed.
975
:param branch: The branch being logged.
976
:param generate_delta: Whether to generate a delta for each revision.
977
Permitted values are None, 'full' and 'partial'.
978
:param search: A user text search string.
979
:param log_rev_iterator: An input iterator containing all revisions that
980
could be displayed, in lists.
981
:param files: If non empty, only revisions matching one or more of
982
the files are to be kept.
983
:param direction: the direction in which view_revisions is sorted
984
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
987
if not generate_delta and not files:
988
return log_rev_iterator
989
return _generate_deltas(branch.repository, log_rev_iterator,
990
generate_delta, files, direction)
993
def _generate_deltas(repository, log_rev_iterator, delta_type, files,
995
"""Create deltas for each batch of revisions in log_rev_iterator.
997
If we're only generating deltas for the sake of filtering against
998
files, we stop generating deltas once all files reach the
999
appropriate life-cycle point. If we're receiving data newest to
1000
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
1002
check_files = files is not None and len(files) > 0
1004
file_set = set(files)
1005
if direction == 'reverse':
1011
for revs in log_rev_iterator:
1012
# If we were matching against files and we've run out,
1013
# there's nothing left to do
1014
if check_files and not file_set:
1016
revisions = [rev[1] for rev in revs]
1018
if delta_type == 'full' and not check_files:
1019
deltas = repository.get_revision_deltas(revisions)
1020
for rev, delta in zip(revs, deltas):
1021
new_revs.append((rev[0], rev[1], delta))
1023
deltas = repository.get_revision_deltas(
1024
revisions, specific_files=file_set)
1025
for rev, delta in zip(revs, deltas):
1027
if delta is None or not delta.has_changed():
1030
_update_files(delta, file_set, stop_on)
1031
if delta_type is None:
1033
elif delta_type == 'full':
1034
# If the file matches all the time, rebuilding
1035
# a full delta like this in addition to a partial
1036
# one could be slow. However, it's likely that
1037
# most revisions won't get this far, making it
1038
# faster to filter on the partial deltas and
1039
# build the occasional full delta than always
1040
# building full deltas and filtering those.
1042
delta = repository.get_revision_delta(rev_id)
1043
new_revs.append((rev[0], rev[1], delta))
1047
def _update_files(delta, files, stop_on):
1048
"""Update the set of files to search based on file lifecycle events.
1050
:param files: a set of files to update
1051
:param stop_on: either 'add' or 'remove' - take files out of the
1052
files set once their add or remove entry is detected respectively
1054
if stop_on == 'add':
1055
for item in delta.added:
1056
if item.path[1] in files:
1057
files.remove(item.path[1])
1058
for item in delta.copied + delta.renamed:
1059
if item.path[1] in files:
1060
files.remove(item.path[1])
1061
files.add(item.path[0])
1062
if item.kind[1] == 'directory':
1063
for path in list(files):
1064
if is_inside(item.path[1], path):
1066
files.add(item.path[0] + path[len(item.path[1]):])
1067
elif stop_on == 'delete':
1068
for item in delta.removed:
1069
if item.path[0] in files:
1070
files.remove(item.path[0])
1071
for item in delta.copied + delta.renamed:
1072
if item.path[0] in files:
1073
files.remove(item.path[0])
1074
files.add(item.path[1])
1075
if item.kind[0] == 'directory':
1076
for path in list(files):
1077
if is_inside(item.path[0], path):
1079
files.add(item.path[1] + path[len(item.path[0]):])
1082
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
1083
"""Extract revision objects from the repository
1085
:param branch: The branch being logged.
1086
:param generate_delta: Whether to generate a delta for each revision.
1087
:param search: A user text search string.
1088
:param log_rev_iterator: An input iterator containing all revisions that
1089
could be displayed, in lists.
1090
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1093
repository = branch.repository
1094
for revs in log_rev_iterator:
1095
# r = revision_id, n = revno, d = merge depth
1096
revision_ids = [view[0] for view, _, _ in revs]
1097
revisions = dict(repository.iter_revisions(revision_ids))
1098
yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
1101
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1102
"""Group up a single large batch into smaller ones.
1104
:param branch: The branch being logged.
1105
:param generate_delta: Whether to generate a delta for each revision.
1106
:param search: A user text search string.
1107
:param log_rev_iterator: An input iterator containing all revisions that
1108
could be displayed, in lists.
1109
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1113
for batch in log_rev_iterator:
1116
step = [detail for _, detail in zip(range(num), batch)]
1120
num = min(int(num * 1.5), 200)
1123
def _get_revision_limits(branch, start_revision, end_revision):
1124
"""Get and check revision limits.
1126
:param branch: The branch containing the revisions.
1128
:param start_revision: The first revision to be logged, as a RevisionInfo.
1130
:param end_revision: The last revision to be logged, as a RevisionInfo
1132
:return: (start_rev_id, end_rev_id) tuple.
1136
if start_revision is not None:
1137
if not isinstance(start_revision, revisionspec.RevisionInfo):
1138
raise TypeError(start_revision)
1139
start_rev_id = start_revision.rev_id
1140
start_revno = start_revision.revno
1141
if start_revno is None:
1146
if end_revision is not None:
1147
if not isinstance(end_revision, revisionspec.RevisionInfo):
1148
raise TypeError(start_revision)
1149
end_rev_id = end_revision.rev_id
1150
end_revno = end_revision.revno
1152
if branch.last_revision() != _mod_revision.NULL_REVISION:
1153
if (start_rev_id == _mod_revision.NULL_REVISION
1154
or end_rev_id == _mod_revision.NULL_REVISION):
1155
raise errors.CommandError(
1156
gettext('Logging revision 0 is invalid.'))
1157
if end_revno is not None and start_revno > end_revno:
1158
raise errors.CommandError(
1159
gettext("Start revision must be older than the end revision."))
1160
return (start_rev_id, end_rev_id)
1163
def _get_mainline_revs(branch, start_revision, end_revision):
1164
"""Get the mainline revisions from the branch.
1166
Generates the list of mainline revisions for the branch.
1168
:param branch: The branch containing the revisions.
1170
:param start_revision: The first revision to be logged.
1171
For backwards compatibility this may be a mainline integer revno,
1172
but for merge revision support a RevisionInfo is expected.
1174
:param end_revision: The last revision to be logged.
1175
For backwards compatibility this may be a mainline integer revno,
1176
but for merge revision support a RevisionInfo is expected.
1178
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1180
branch_revno, branch_last_revision = branch.last_revision_info()
1181
if branch_revno == 0:
1182
return None, None, None, None
1184
# For mainline generation, map start_revision and end_revision to
1185
# mainline revnos. If the revision is not on the mainline choose the
1186
# appropriate extreme of the mainline instead - the extra will be
1188
# Also map the revisions to rev_ids, to be used in the later filtering
153
_show_log(branch, lf, specific_fileid, verbose, direction,
154
start_revision, end_revision, search)
158
def _show_log(branch,
160
specific_fileid=None,
166
"""Worker function for show_log - see show_log."""
167
from bzrlib.osutils import format_date
168
from bzrlib.errors import BzrCheckError
170
from warnings import warn
172
if not isinstance(lf, LogFormatter):
173
warn("not a LogFormatter instance: %r" % lf)
176
mutter('get log for file_id %r', specific_fileid)
178
if search is not None:
180
searchRE = re.compile(search, re.IGNORECASE)
184
which_revs = _enumerate_history(branch)
1191
186
if start_revision is None:
1194
if isinstance(start_revision, revisionspec.RevisionInfo):
1195
start_rev_id = start_revision.rev_id
1196
start_revno = start_revision.revno or 1
1198
branch.check_real_revno(start_revision)
1199
start_revno = start_revision
189
branch.check_real_revno(start_revision)
1202
191
if end_revision is None:
1203
end_revno = branch_revno
1205
if isinstance(end_revision, revisionspec.RevisionInfo):
1206
end_rev_id = end_revision.rev_id
1207
end_revno = end_revision.revno or branch_revno
1209
branch.check_real_revno(end_revision)
1210
end_revno = end_revision
1212
if ((start_rev_id == _mod_revision.NULL_REVISION)
1213
or (end_rev_id == _mod_revision.NULL_REVISION)):
1214
raise errors.CommandError(gettext('Logging revision 0 is invalid.'))
1215
if start_revno > end_revno:
1216
raise errors.CommandError(gettext("Start revision must be older "
1217
"than the end revision."))
1219
if end_revno < start_revno:
1220
return None, None, None, None
1221
cur_revno = branch_revno
1224
graph = branch.repository.get_graph()
1225
for revision_id in graph.iter_lefthand_ancestry(
1226
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1227
if cur_revno < start_revno:
1228
# We have gone far enough, but we always add 1 more revision
1229
rev_nos[revision_id] = cur_revno
1230
mainline_revs.append(revision_id)
1232
if cur_revno <= end_revno:
1233
rev_nos[revision_id] = cur_revno
1234
mainline_revs.append(revision_id)
1237
# We walked off the edge of all revisions, so we add a 'None' marker
1238
mainline_revs.append(None)
1240
mainline_revs.reverse()
192
end_revision = len(which_revs)
194
branch.check_real_revno(end_revision)
196
# list indexes are 0-based; revisions are 1-based
197
cut_revs = which_revs[(start_revision-1):(end_revision)]
201
# convert the revision history to a dictionary:
202
rev_nos = dict((k, v) for v, k in cut_revs)
1242
204
# override the mainline to look like the revision history.
1243
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1246
def _filter_revisions_touching_path(branch, path, view_revisions,
1247
include_merges=True):
1248
r"""Return the list of revision ids which touch a given path.
1250
The function filters view_revisions and returns a subset.
1251
This includes the revisions which directly change the path,
1252
and the revisions which merge these changes. So if the
1265
And 'C' changes a file, then both C and D will be returned. F will not be
1266
returned even though it brings the changes to C into the branch starting
1267
with E. (Note that if we were using F as the tip instead of G, then we
1270
This will also be restricted based on a subset of the mainline.
1272
:param branch: The branch where we can get text revision information.
1274
:param path: Filter out revisions that do not touch path.
1276
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1277
tuples. This is the list of revisions which will be filtered. It is
1278
assumed that view_revisions is in merge_sort order (i.e. newest
1281
:param include_merges: include merge revisions in the result or not
1283
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
205
mainline_revs = [revision_id for index, revision_id in cut_revs]
206
if cut_revs[0][0] == 1:
207
mainline_revs.insert(0, None)
209
mainline_revs.insert(0, which_revs[start_revision-2][1])
210
# how should we show merged revisions ?
211
# old api: show_merge. New api: show_merge_revno
212
show_merge_revno = getattr(lf, 'show_merge_revno', None)
213
show_merge = getattr(lf, 'show_merge', None)
214
if show_merge is None and show_merge_revno is None:
215
# no merged-revno support
216
include_merges = False
218
include_merges = True
219
if show_merge is not None and show_merge_revno is None:
220
# tell developers to update their code
221
symbol_versioning.warn('LogFormatters should provide show_merge_revno '
222
'instead of show_merge since bzr 0.11.',
223
DeprecationWarning, stacklevel=3)
224
view_revisions = list(get_view_revisions(mainline_revs, rev_nos, branch,
225
direction, include_merges=include_merges))
227
def iter_revisions():
228
# r = revision, n = revno, d = merge depth
229
revision_ids = [r for r, n, d in view_revisions]
230
zeros = set(r for r, n, d in view_revisions if d == 0)
232
repository = branch.repository
235
revisions = repository.get_revisions(revision_ids[:num])
236
if verbose or specific_fileid:
237
delta_revisions = [r for r in revisions if
238
r.revision_id in zeros]
239
deltas = repository.get_deltas_for_revisions(delta_revisions)
240
cur_deltas = dict(izip((r.revision_id for r in
241
delta_revisions), deltas))
242
for revision in revisions:
243
# The delta value will be None unless
244
# 1. verbose or specific_fileid is specified, and
245
# 2. the revision is a mainline revision
246
yield revision, cur_deltas.get(revision.revision_id)
247
revision_ids = revision_ids[num:]
250
# now we just print all the revisions
251
for ((rev_id, revno, merge_depth), (rev, delta)) in \
252
izip(view_revisions, iter_revisions()):
255
if not searchRE.search(rev.message):
259
# a mainline revision.
262
if not delta.touches_file_id(specific_fileid):
266
# although we calculated it, throw it away without display
269
lf.show(revno, rev, delta)
271
if show_merge_revno is None:
272
lf.show_merge(rev, merge_depth)
274
lf.show_merge_revno(rev, merge_depth, revno)
277
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
278
include_merges=True):
279
"""Produce an iterator of revisions to show
280
:return: an iterator of (revision_id, revno, merge_depth)
281
(if there is no revno for a revision, None is supplied)
1285
# Lookup all possible text keys to determine which ones actually modified
1287
graph = branch.repository.get_file_graph()
1288
start_tree = branch.repository.revision_tree(view_revisions[0][0])
1289
file_id = start_tree.path2id(path)
1290
get_parent_map = graph.get_parent_map
1291
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1293
# Looking up keys in batches of 1000 can cut the time in half, as well as
1294
# memory consumption. GraphIndex *does* like to look for a few keys in
1295
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1296
# TODO: This code needs to be re-evaluated periodically as we tune the
1297
# indexing layer. We might consider passing in hints as to the known
1298
# access pattern (sparse/clustered, high success rate/low success
1299
# rate). This particular access is clustered with a low success rate.
1300
modified_text_revisions = set()
1302
for start in range(0, len(text_keys), chunk_size):
1303
next_keys = text_keys[start:start + chunk_size]
1304
# Only keep the revision_id portion of the key
1305
modified_text_revisions.update(
1306
[k[1] for k in get_parent_map(next_keys)])
1307
del text_keys, next_keys
1310
# Track what revisions will merge the current revision, replace entries
1311
# with 'None' when they have been added to result
1312
current_merge_stack = [None]
1313
for info in view_revisions:
1314
rev_id, revno, depth = info
1315
if depth == len(current_merge_stack):
1316
current_merge_stack.append(info)
1318
del current_merge_stack[depth + 1:]
1319
current_merge_stack[-1] = info
1321
if rev_id in modified_text_revisions:
1322
# This needs to be logged, along with the extra revisions
1323
for idx in range(len(current_merge_stack)):
1324
node = current_merge_stack[idx]
1325
if node is not None:
1326
if include_merges or node[2] == 0:
1328
current_merge_stack[idx] = None
283
if include_merges is False:
284
revision_ids = mainline_revs[1:]
285
if direction == 'reverse':
286
revision_ids.reverse()
287
for revision_id in revision_ids:
288
yield revision_id, str(rev_nos[revision_id]), 0
290
merge_sorted_revisions = merge_sort(
291
branch.repository.get_revision_graph(mainline_revs[-1]),
296
if direction == 'forward':
297
# forward means oldest first.
298
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
299
elif direction != 'reverse':
300
raise ValueError('invalid direction %r' % direction)
302
for sequence, rev_id, merge_depth, revno, end_of_merge in merge_sorted_revisions:
303
yield rev_id, '.'.join(map(str, revno)), merge_depth
1332
306
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1333
307
"""Reverse revisions by depth.
1335
309
Revisions with a different depth are sorted as a group with the previous
1336
revision of that depth. There may be no topological justification for this
310
revision of that depth. There may be no topological justification for this,
1337
311
but it looks much nicer.
1339
# Add a fake revision at start so that we can always attach sub revisions
1340
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1341
313
zd_revisions = []
1342
314
for val in merge_sorted_revisions:
1343
315
if val[2] == _depth:
1344
# Each revision at the current depth becomes a chunk grouping all
1345
# higher depth revisions.
1346
316
zd_revisions.append([val])
318
assert val[2] > _depth
1348
319
zd_revisions[-1].append(val)
1349
320
for revisions in zd_revisions:
1350
321
if len(revisions) > 1:
1351
# We have higher depth revisions, let reverse them locally
1352
322
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1353
323
zd_revisions.reverse()
1355
325
for chunk in zd_revisions:
1356
326
result.extend(chunk)
1358
# Top level call, get rid of the fake revisions that have been added
1359
result = [r for r in result if r[0] is not None and r[1] is not None]
1363
class LogRevision(object):
1364
"""A revision to be logged (by LogFormatter.log_revision).
1366
A simple wrapper for the attributes of a revision to be logged.
1367
The attributes may or may not be populated, as determined by the
1368
logging options and the log formatter capabilities.
1371
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1372
tags=None, diff=None, signature=None):
1377
self.revno = str(revno)
1378
self.merge_depth = merge_depth
1382
self.signature = signature
1385
330
class LogFormatter(object):
1386
"""Abstract class to display log messages.
1388
At a minimum, a derived class must implement the log_revision method.
1390
If the LogFormatter needs to be informed of the beginning or end of
1391
a log it should implement the begin_log and/or end_log hook methods.
1393
A LogFormatter should define the following supports_XXX flags
1394
to indicate which LogRevision attributes it supports:
1396
- supports_delta must be True if this log formatter supports delta.
1397
Otherwise the delta attribute may not be populated. The 'delta_format'
1398
attribute describes whether the 'short_status' format (1) or the long
1399
one (2) should be used.
1401
- supports_merge_revisions must be True if this log formatter supports
1402
merge revisions. If not, then only mainline revisions will be passed
1405
- preferred_levels is the number of levels this formatter defaults to.
1406
The default value is zero meaning display all levels.
1407
This value is only relevant if supports_merge_revisions is True.
1409
- supports_tags must be True if this log formatter supports tags.
1410
Otherwise the tags attribute may not be populated.
1412
- supports_diff must be True if this log formatter supports diffs.
1413
Otherwise the diff attribute may not be populated.
1415
- supports_signatures must be True if this log formatter supports GPG
1418
Plugins can register functions to show custom revision properties using
1419
the properties_handler_registry. The registered function
1420
must respect the following interface description::
1422
def my_show_properties(properties_dict):
1423
# code that returns a dict {'name':'value'} of the properties
1426
preferred_levels = 0
1428
def __init__(self, to_file, show_ids=False, show_timezone='original',
1429
delta_format=None, levels=None, show_advice=False,
1430
to_exact_file=None, author_list_handler=None):
1431
"""Create a LogFormatter.
1433
:param to_file: the file to output to
1434
:param to_exact_file: if set, gives an output stream to which
1435
non-Unicode diffs are written.
1436
:param show_ids: if True, revision-ids are to be displayed
1437
:param show_timezone: the timezone to use
1438
:param delta_format: the level of delta information to display
1439
or None to leave it to the formatter to decide
1440
:param levels: the number of levels to display; None or -1 to
1441
let the log formatter decide.
1442
:param show_advice: whether to show advice at the end of the
1444
:param author_list_handler: callable generating a list of
1445
authors to display for a given revision
331
"""Abstract class to display log messages."""
333
def __init__(self, to_file, show_ids=False, show_timezone='original'):
1447
334
self.to_file = to_file
1448
# 'exact' stream used to show diff, it should print content 'as is'
1449
# and should not try to decode/encode it to unicode to avoid bug
1451
if to_exact_file is not None:
1452
self.to_exact_file = to_exact_file
1454
# XXX: somewhat hacky; this assumes it's a codec writer; it's
1455
# better for code that expects to get diffs to pass in the exact
1457
self.to_exact_file = getattr(to_file, 'stream', to_file)
1458
335
self.show_ids = show_ids
1459
336
self.show_timezone = show_timezone
1460
if delta_format is None:
1461
# Ensures backward compatibility
1462
delta_format = 2 # long format
1463
self.delta_format = delta_format
1464
self.levels = levels
1465
self._show_advice = show_advice
1466
self._merge_count = 0
1467
self._author_list_handler = author_list_handler
1469
def get_levels(self):
1470
"""Get the number of levels to display or 0 for all."""
1471
if getattr(self, 'supports_merge_revisions', False):
1472
if self.levels is None or self.levels == -1:
1473
self.levels = self.preferred_levels
1478
def log_revision(self, revision):
1481
:param revision: The LogRevision to be logged.
338
def show(self, revno, rev, delta):
1483
339
raise NotImplementedError('not implemented in abstract base')
1485
def show_advice(self):
1486
"""Output user advice, if any, when the log is completed."""
1487
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1488
advice_sep = self.get_advice_separator()
1490
self.to_file.write(advice_sep)
1492
"Use --include-merged or -n0 to see merged revisions.\n")
1494
def get_advice_separator(self):
1495
"""Get the text separating the log from the closing advice."""
1498
341
def short_committer(self, rev):
1499
name, address = config.parse_username(rev.committer)
1504
def short_author(self, rev):
1505
return self.authors(rev, 'first', short=True, sep=', ')
1507
def authors(self, rev, who, short=False, sep=None):
1508
"""Generate list of authors, taking --authors option into account.
1510
The caller has to specify the name of a author list handler,
1511
as provided by the author list registry, using the ``who``
1512
argument. That name only sets a default, though: when the
1513
user selected a different author list generation using the
1514
``--authors`` command line switch, as represented by the
1515
``author_list_handler`` constructor argument, that value takes
1518
:param rev: The revision for which to generate the list of authors.
1519
:param who: Name of the default handler.
1520
:param short: Whether to shorten names to either name or address.
1521
:param sep: What separator to use for automatic concatenation.
1523
if self._author_list_handler is not None:
1524
# The user did specify --authors, which overrides the default
1525
author_list_handler = self._author_list_handler
1527
# The user didn't specify --authors, so we use the caller's default
1528
author_list_handler = author_list_registry.get(who)
1529
names = author_list_handler(rev)
1531
for i in range(len(names)):
1532
name, address = config.parse_username(names[i])
1538
names = sep.join(names)
1541
def merge_marker(self, revision):
1542
"""Get the merge marker to include in the output or '' if none."""
1543
if len(revision.rev.parent_ids) > 1:
1544
self._merge_count += 1
1549
def show_properties(self, revision, indent):
1550
"""Displays the custom properties returned by each registered handler.
1552
If a registered handler raises an error it is propagated.
1554
for line in self.custom_properties(revision):
1555
self.to_file.write("%s%s\n" % (indent, line))
1557
def custom_properties(self, revision):
1558
"""Format the custom properties returned by each registered handler.
1560
If a registered handler raises an error it is propagated.
1562
:return: a list of formatted lines (excluding trailing newlines)
1564
lines = self._foreign_info_properties(revision)
1565
for key, handler in properties_handler_registry.iteritems():
1567
lines.extend(self._format_properties(handler(revision)))
1569
trace.log_exception_quietly()
1570
trace.print_exception(sys.exc_info(), self.to_file)
1573
def _foreign_info_properties(self, rev):
1574
"""Custom log displayer for foreign revision identifiers.
1576
:param rev: Revision object.
1578
# Revision comes directly from a foreign repository
1579
if isinstance(rev, foreign.ForeignRevision):
1580
return self._format_properties(
1581
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1583
# Imported foreign revision revision ids always contain :
1584
if b":" not in rev.revision_id:
1587
# Revision was once imported from a foreign repository
1589
foreign_revid, mapping = \
1590
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1591
except errors.InvalidRevisionId:
1594
return self._format_properties(
1595
mapping.vcs.show_foreign_revid(foreign_revid))
1597
def _format_properties(self, properties):
1599
for key, value in properties.items():
1600
lines.append(key + ': ' + value)
1603
def show_diff(self, to_file, diff, indent):
1604
encoding = get_terminal_encoding()
1605
for l in diff.rstrip().split(b'\n'):
1606
to_file.write(indent + l.decode(encoding, 'ignore') + '\n')
1609
# Separator between revisions in long format
1610
_LONG_SEP = '-' * 60
342
return re.sub('<.*@.*>', '', rev.committer).strip(' ')
1613
345
class LongLogFormatter(LogFormatter):
1615
supports_merge_revisions = True
1616
preferred_levels = 1
1617
supports_delta = True
1618
supports_tags = True
1619
supports_diff = True
1620
supports_signatures = True
1622
def __init__(self, *args, **kwargs):
1623
super(LongLogFormatter, self).__init__(*args, **kwargs)
1624
if self.show_timezone == 'original':
1625
self.date_string = self._date_string_original_timezone
1627
self.date_string = self._date_string_with_timezone
1629
def _date_string_with_timezone(self, rev):
1630
return format_date(rev.timestamp, rev.timezone or 0,
1633
def _date_string_original_timezone(self, rev):
1634
return format_date_with_offset_in_original_timezone(rev.timestamp,
1637
def log_revision(self, revision):
1638
"""Log a revision, either merged or not."""
1639
indent = ' ' * revision.merge_depth
1641
if revision.revno is not None:
1642
lines.append('revno: %s%s' % (revision.revno,
1643
self.merge_marker(revision)))
1645
lines.append('tags: %s' % (', '.join(sorted(revision.tags))))
1646
if self.show_ids or revision.revno is None:
1647
lines.append('revision-id: %s' %
1648
(revision.rev.revision_id.decode('utf-8'),))
346
def show(self, revno, rev, delta):
347
return self._show_helper(revno=revno, rev=rev, delta=delta)
349
@deprecated_method(zero_eleven)
350
def show_merge(self, rev, merge_depth):
351
return self._show_helper(rev=rev, indent=' '*merge_depth, merged=True, delta=None)
353
def show_merge_revno(self, rev, merge_depth, revno):
354
"""Show a merged revision rev, with merge_depth and a revno."""
355
return self._show_helper(rev=rev, revno=revno,
356
indent=' '*merge_depth, merged=True, delta=None)
358
def _show_helper(self, rev=None, revno=None, indent='', merged=False, delta=None):
359
"""Show a revision, either merged or not."""
360
from bzrlib.osutils import format_date
361
to_file = self.to_file
362
print >>to_file, indent+'-' * 60
363
if revno is not None:
364
print >>to_file, indent+'revno:', revno
366
print >>to_file, indent+'merged:', rev.revision_id
368
print >>to_file, indent+'revision-id:', rev.revision_id
1649
369
if self.show_ids:
1650
for parent_id in revision.rev.parent_ids:
1651
lines.append('parent: %s' % (parent_id.decode('utf-8'),))
1652
lines.extend(self.custom_properties(revision.rev))
1654
committer = revision.rev.committer
1655
authors = self.authors(revision.rev, 'all')
1656
if authors != [committer]:
1657
lines.append('author: %s' % (", ".join(authors),))
1658
lines.append('committer: %s' % (committer,))
1660
branch_nick = revision.rev.properties.get('branch-nick', None)
1661
if branch_nick is not None:
1662
lines.append('branch nick: %s' % (branch_nick,))
1664
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1666
if revision.signature is not None:
1667
lines.append('signature: ' + revision.signature)
1669
lines.append('message:')
1670
if not revision.rev.message:
1671
lines.append(' (no message)')
370
for parent_id in rev.parent_ids:
371
print >>to_file, indent+'parent:', parent_id
372
print >>to_file, indent+'committer:', rev.committer
374
print >>to_file, indent+'branch nick: %s' % \
375
rev.properties['branch-nick']
378
date_str = format_date(rev.timestamp,
381
print >>to_file, indent+'timestamp: %s' % date_str
383
print >>to_file, indent+'message:'
385
print >>to_file, indent+' (no message)'
1673
message = revision.rev.message.rstrip('\r\n')
387
message = rev.message.rstrip('\r\n')
1674
388
for l in message.split('\n'):
1675
lines.append(' %s' % (l,))
1677
# Dump the output, appending the delta and diff if requested
1678
to_file = self.to_file
1679
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1680
if revision.delta is not None:
1681
# Use the standard status output to display changes
1682
from breezy.delta import report_delta
1683
report_delta(to_file, revision.delta, short_status=False,
1684
show_ids=self.show_ids, indent=indent)
1685
if revision.diff is not None:
1686
to_file.write(indent + 'diff:\n')
1688
# Note: we explicitly don't indent the diff (relative to the
1689
# revision information) so that the output can be fed to patch -p0
1690
self.show_diff(self.to_exact_file, revision.diff, indent)
1691
self.to_exact_file.flush()
1693
def get_advice_separator(self):
1694
"""Get the text separating the log from the closing advice."""
1695
return '-' * 60 + '\n'
389
print >>to_file, indent+' ' + l
390
if delta is not None:
391
delta.show(to_file, self.show_ids)
1698
394
class ShortLogFormatter(LogFormatter):
1700
supports_merge_revisions = True
1701
preferred_levels = 1
1702
supports_delta = True
1703
supports_tags = True
1704
supports_diff = True
1706
def __init__(self, *args, **kwargs):
1707
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1708
self.revno_width_by_depth = {}
1710
def log_revision(self, revision):
1711
# We need two indents: one per depth and one for the information
1712
# relative to that indent. Most mainline revnos are 5 chars or
1713
# less while dotted revnos are typically 11 chars or less. Once
1714
# calculated, we need to remember the offset for a given depth
1715
# as we might be starting from a dotted revno in the first column
1716
# and we want subsequent mainline revisions to line up.
1717
depth = revision.merge_depth
1718
indent = ' ' * depth
1719
revno_width = self.revno_width_by_depth.get(depth)
1720
if revno_width is None:
1721
if revision.revno is None or revision.revno.find('.') == -1:
1722
# mainline revno, e.g. 12345
1725
# dotted revno, e.g. 12345.10.55
1727
self.revno_width_by_depth[depth] = revno_width
1728
offset = ' ' * (revno_width + 1)
395
def show(self, revno, rev, delta):
396
from bzrlib.osutils import format_date
1730
398
to_file = self.to_file
1733
tags = ' {%s}' % (', '.join(sorted(revision.tags)))
1734
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1735
revision.revno or "", self.short_author(
1737
format_date(revision.rev.timestamp,
1738
revision.rev.timezone or 0,
1739
self.show_timezone, date_fmt="%Y-%m-%d",
1741
tags, self.merge_marker(revision)))
1742
self.show_properties(revision.rev, indent + offset)
1743
if self.show_ids or revision.revno is None:
1744
to_file.write(indent + offset + 'revision-id:%s\n'
1745
% (revision.rev.revision_id.decode('utf-8'),))
1746
if not revision.rev.message:
1747
to_file.write(indent + offset + '(no message)\n')
399
date_str = format_date(rev.timestamp, rev.timezone or 0,
401
print >>to_file, "%5s %s\t%s" % (revno, self.short_committer(rev),
402
format_date(rev.timestamp, rev.timezone or 0,
403
self.show_timezone, date_fmt="%Y-%m-%d",
406
print >>to_file, ' revision-id:', rev.revision_id
408
print >>to_file, ' (no message)'
1749
message = revision.rev.message.rstrip('\r\n')
410
message = rev.message.rstrip('\r\n')
1750
411
for l in message.split('\n'):
1751
to_file.write(indent + offset + '%s\n' % (l,))
412
print >>to_file, ' ' + l
1753
if revision.delta is not None:
1754
# Use the standard status output to display changes
1755
from breezy.delta import report_delta
1756
report_delta(to_file, revision.delta,
1757
short_status=self.delta_format == 1,
1758
show_ids=self.show_ids, indent=indent + offset)
1759
if revision.diff is not None:
1760
self.show_diff(self.to_exact_file, revision.diff, ' ')
414
# TODO: Why not show the modified files in a shorter form as
415
# well? rewrap them single lines of appropriate length
416
if delta is not None:
417
delta.show(to_file, self.show_ids)
1764
421
class LineLogFormatter(LogFormatter):
1766
supports_merge_revisions = True
1767
preferred_levels = 1
1768
supports_tags = True
1770
def __init__(self, *args, **kwargs):
1771
super(LineLogFormatter, self).__init__(*args, **kwargs)
1772
width = terminal_width()
1773
if width is not None:
1774
# we need one extra space for terminals that wrap on last char
1776
self._max_chars = width
1778
422
def truncate(self, str, max_len):
1779
if max_len is None or len(str) <= max_len:
423
if len(str) <= max_len:
1781
return str[:max_len - 3] + '...'
425
return str[:max_len-3]+'...'
1783
427
def date_string(self, rev):
1784
return format_date(rev.timestamp, rev.timezone or 0,
428
from bzrlib.osutils import format_date
429
return format_date(rev.timestamp, rev.timezone or 0,
1785
430
self.show_timezone, date_fmt="%Y-%m-%d",
1786
431
show_offset=False)