1
# Copyright (C) 2005-2011 Canonical Ltd
3
# This program is free software; you can redistribute it and/or modify
4
# it under the terms of the GNU General Public License as published by
5
# the Free Software Foundation; either version 2 of the License, or
6
# (at your option) any later version.
8
# This program is distributed in the hope that it will be useful,
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
# GNU General Public License for more details.
13
# You should have received a copy of the GNU General Public License
14
# along with this program; if not, write to the Free Software
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
"""Code to show logs of changes.
19
Various flavors of log can be produced:
21
* for one file, or the whole tree, and (not done yet) for
22
files in a given directory
24
* in "verbose" mode with a description of what changed from one
27
* with file-ids and revision-ids shown
29
Logs are actually written out through an abstract LogFormatter
30
interface, which allows for different preferred formats. Plugins can
33
Logs can be produced in either forward (oldest->newest) or reverse
34
(newest->oldest) order.
36
Logs can be filtered to show only revisions matching a particular
37
search string, or within a particular range of revisions. The range
38
can be given as date/times, which are reduced to revisions before
41
In verbose mode we show a summary of what changed in each particular
42
revision. Note that this is the delta for changes in that revision
43
relative to its left-most parent, not the delta relative to the last
44
logged revision. So for example if you ask for a verbose log of
45
changes touching hello.c you will get a list of those revisions also
46
listing other things that were changed in the same revision, but not
47
all the changes since the previous revision that touched hello.c.
50
from __future__ import absolute_import
56
from warnings import (
60
from .lazy_import import lazy_import
61
lazy_import(globals(), """
68
repository as _mod_repository,
69
revision as _mod_revision,
72
from breezy.i18n import gettext, ngettext
81
from .osutils import (
83
format_date_with_offset_in_original_timezone,
84
get_diff_header_encoding,
85
get_terminal_encoding,
93
from .tree import find_previous_path
def find_touching_revisions(repository, last_revision, last_tree, last_path):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    last_verifier = last_tree.get_file_verifier(last_path)
    graph = repository.get_graph()
    # Walk the left-hand ancestry from the tip; revno counts down from the
    # length of that mainline history.
    history = list(graph.iter_lefthand_ancestry(last_revision, []))
    revno = len(history)
    for revision_id in history:
        this_tree = repository.revision_tree(revision_id)
        this_path = find_previous_path(last_tree, this_tree, last_path)

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if this_path is not None and last_path is None:
            yield revno, revision_id, "deleted " + this_path
            this_verifier = this_tree.get_file_verifier(this_path)
        elif this_path is None and last_path is not None:
            yield revno, revision_id, "added " + last_path
            # The file does not exist in the older revision; there is no
            # verifier to carry forward (we stop below anyway).
            this_verifier = None
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
            this_verifier = this_tree.get_file_verifier(this_path)
        else:
            this_verifier = this_tree.get_file_verifier(this_path)
            if (this_verifier != last_verifier):
                yield revno, revision_id, "modified " + this_path

        last_verifier = this_verifier
        last_path = this_path
        last_tree = this_tree
        if last_path is None:
            # The file was added in this revision; nothing older can touch it.
            return
        revno -= 1
140
specific_fileid=None,
149
"""Write out human-readable log of commits to this branch.
151
This function is being retained for backwards compatibility but
152
should not be extended with new parameters. Use the new Logger class
153
instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
154
make_log_request_dict function.
156
:param lf: The LogFormatter object showing the output.
158
:param specific_fileid: If not None, list only the commits affecting the
159
specified file, rather than all commits.
161
:param verbose: If True show added/changed/deleted/renamed files.
163
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
166
:param start_revision: If not None, only show revisions >= start_revision
168
:param end_revision: If not None, only show revisions <= end_revision
170
:param search: If not None, only show revisions with matching commit
173
:param limit: If set, shows only 'limit' revisions, all revisions are shown
176
:param show_diff: If True, output a diff after each revision.
178
:param match: Dictionary of search lists to use when matching revision
181
# Convert old-style parameters to new-style parameters
182
if specific_fileid is not None:
183
file_ids = [specific_fileid]
188
delta_type = 'partial'
195
diff_type = 'partial'
201
if isinstance(start_revision, int):
203
start_revision = revisionspec.RevisionInfo(branch, start_revision)
204
except errors.NoSuchRevision:
205
raise errors.InvalidRevisionNumber(start_revision)
207
if isinstance(end_revision, int):
209
end_revision = revisionspec.RevisionInfo(branch, end_revision)
210
except errors.NoSuchRevision:
211
raise errors.InvalidRevisionNumber(end_revision)
213
if end_revision is not None and end_revision.revno == 0:
214
raise errors.InvalidRevisionNumber(end_revision.revno)
216
# Build the request and execute it
217
rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
218
start_revision=start_revision, end_revision=end_revision,
219
limit=limit, message_search=search,
220
delta_type=delta_type, diff_type=diff_type)
221
Logger(branch, rqst).show(lf)
224
# Note: This needs to be kept in sync with the defaults in
225
# make_log_request_dict() below
226
_DEFAULT_REQUEST_PARAMS = {
227
'direction': 'reverse',
229
'generate_tags': True,
230
'exclude_common_ancestry': False,
231
'_match_using_deltas': True,
def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
      'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
      affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
      revisions >= start_revision

    :param end_revision: If not None, only generate
      revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
      are shown if None or 0.

    :param message_search: If not None, only include revisions with
      matching commit messages

    :param levels: the number of levels of revisions to
      generate; 1 for just the mainline; 0 for all levels, or None for
      a sensible default.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
      'full' means generate the complete delta - adds/deletes/modifies/etc;
      'partial' means filter the delta using specific_fileids;
      None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
      'full' means generate the complete diff - adds/deletes/modifies/etc;
      'partial' means filter the diff using specific_fileids;
      None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so breezy client code should NOT
      use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
      range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of list of search strings to use when filtering
      revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
      the empty string to match any of the preceding properties.

    :param omit_merges: If True, commits with more than one parent are
      omitted.
    """
    # Take care of old style message_search parameter by folding it into
    # the newer 'match' dictionary under the 'message' key.
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary.

    :param rqst: a (possibly None or partial) log request dictionary.
    :return: a new dictionary with _DEFAULT_REQUEST_PARAMS filled in for
        any keys the caller did not supply.
    """
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result
def format_signature_validity(rev_id, branch):
    """get the signature validity

    :param rev_id: revision id to validate
    :param branch: branch of revision
    :return: human readable string to print to log
    """
    from breezy import gpg

    gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
    result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
    # result is a (status, key-info) pair; map each status to a display string.
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        # Abstract: concrete generators (e.g. _DefaultLogGenerator) override.
        raise NotImplementedError(self.iter_log_revisions)
369
class Logger(object):
370
"""An object that generates, formats and displays a log."""
372
def __init__(self, branch, rqst):
375
:param branch: the branch to log
376
:param rqst: A dictionary specifying the query parameters.
377
See make_log_request_dict() for supported values.
380
self.rqst = _apply_log_request_defaults(rqst)
385
:param lf: The LogFormatter object to send the output to.
387
if not isinstance(lf, LogFormatter):
388
warn("not a LogFormatter instance: %r" % lf)
390
self.branch.lock_read()
392
if getattr(lf, 'begin_log', None):
395
if getattr(lf, 'end_log', None):
400
def _show_body(self, lf):
401
"""Show the main log output.
403
Subclasses may wish to override this.
405
# Tweak the LogRequest based on what the LogFormatter can handle.
406
# (There's no point generating stuff if the formatter can't display it.)
408
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
409
# user didn't specify levels, use whatever the LF can handle:
410
rqst['levels'] = lf.get_levels()
412
if not getattr(lf, 'supports_tags', False):
413
rqst['generate_tags'] = False
414
if not getattr(lf, 'supports_delta', False):
415
rqst['delta_type'] = None
416
if not getattr(lf, 'supports_diff', False):
417
rqst['diff_type'] = None
418
if not getattr(lf, 'supports_signatures', False):
419
rqst['signature'] = False
421
# Find and print the interesting revisions
422
generator = self._generator_factory(self.branch, rqst)
424
for lr in generator.iter_log_revisions():
426
except errors.GhostRevisionUnusableHere:
427
raise errors.BzrCommandError(
428
gettext('Further revision history missing.'))
431
def _generator_factory(self, branch, rqst):
432
"""Make the LogGenerator object to use.
434
Subclasses may wish to override this.
436
return _DefaultLogGenerator(branch, rqst)
439
class _StartNotLinearAncestor(Exception):
440
"""Raised when a start revision is not found walking left-hand history."""
443
class _DefaultLogGenerator(LogGenerator):
444
"""The default generator of log revisions."""
446
def __init__(self, branch, rqst):
449
if rqst.get('generate_tags') and branch.supports_tags():
450
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
452
self.rev_tag_dict = {}
454
def iter_log_revisions(self):
455
"""Iterate over LogRevision objects.
457
:return: An iterator yielding LogRevision objects.
460
levels = rqst.get('levels')
461
limit = rqst.get('limit')
462
diff_type = rqst.get('diff_type')
463
show_signature = rqst.get('signature')
464
omit_merges = rqst.get('omit_merges')
466
revision_iterator = self._create_log_revision_iterator()
467
for revs in revision_iterator:
468
for (rev_id, revno, merge_depth), rev, delta in revs:
469
# 0 levels means show everything; merge_depth counts from 0
470
if levels != 0 and merge_depth >= levels:
472
if omit_merges and len(rev.parent_ids) > 1:
475
raise errors.GhostRevisionUnusableHere(rev_id)
476
if diff_type is None:
479
diff = self._format_diff(rev, rev_id, diff_type)
481
signature = format_signature_validity(rev_id, self.branch)
484
yield LogRevision(rev, revno, merge_depth, delta,
485
self.rev_tag_dict.get(rev_id), diff, signature)
488
if log_count >= limit:
491
def _format_diff(self, rev, rev_id, diff_type):
492
repo = self.branch.repository
493
if len(rev.parent_ids) == 0:
494
ancestor_id = _mod_revision.NULL_REVISION
496
ancestor_id = rev.parent_ids[0]
497
tree_1 = repo.revision_tree(ancestor_id)
498
tree_2 = repo.revision_tree(rev_id)
499
file_ids = self.rqst.get('specific_fileids')
500
if diff_type == 'partial' and file_ids is not None:
501
specific_files = [tree_2.id2path(id) for id in file_ids]
503
specific_files = None
505
path_encoding = get_diff_header_encoding()
506
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
507
new_label='', path_encoding=path_encoding)
510
def _create_log_revision_iterator(self):
511
"""Create a revision iterator for log.
513
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
516
self.start_rev_id, self.end_rev_id = _get_revision_limits(
517
self.branch, self.rqst.get('start_revision'),
518
self.rqst.get('end_revision'))
519
if self.rqst.get('_match_using_deltas'):
520
return self._log_revision_iterator_using_delta_matching()
522
# We're using the per-file-graph algorithm. This scales really
523
# well but only makes sense if there is a single file and it's
525
file_count = len(self.rqst.get('specific_fileids'))
527
raise BzrError("illegal LogRequest: must match-using-deltas "
528
"when logging %d files" % file_count)
529
return self._log_revision_iterator_using_per_file_graph()
531
def _log_revision_iterator_using_delta_matching(self):
532
# Get the base revisions, filtering by the revision range
534
generate_merge_revisions = rqst.get('levels') != 1
535
delayed_graph_generation = not rqst.get('specific_fileids') and (
536
rqst.get('limit') or self.start_rev_id or self.end_rev_id)
537
view_revisions = _calc_view_revisions(
538
self.branch, self.start_rev_id, self.end_rev_id,
539
rqst.get('direction'),
540
generate_merge_revisions=generate_merge_revisions,
541
delayed_graph_generation=delayed_graph_generation,
542
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
544
# Apply the other filters
545
return make_log_rev_iterator(self.branch, view_revisions,
546
rqst.get('delta_type'), rqst.get('match'),
547
file_ids=rqst.get('specific_fileids'),
548
direction=rqst.get('direction'))
550
def _log_revision_iterator_using_per_file_graph(self):
551
# Get the base revisions, filtering by the revision range.
552
# Note that we always generate the merge revisions because
553
# filter_revisions_touching_file_id() requires them ...
555
view_revisions = _calc_view_revisions(
556
self.branch, self.start_rev_id, self.end_rev_id,
557
rqst.get('direction'), generate_merge_revisions=True,
558
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
559
if not isinstance(view_revisions, list):
560
view_revisions = list(view_revisions)
561
view_revisions = _filter_revisions_touching_file_id(self.branch,
562
rqst.get('specific_fileids')[0], view_revisions,
563
include_merges=rqst.get('levels') != 1)
564
return make_log_rev_iterator(self.branch, view_revisions,
565
rqst.get('delta_type'), rqst.get('match'))
568
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
569
generate_merge_revisions,
570
delayed_graph_generation=False,
571
exclude_common_ancestry=False,
573
"""Calculate the revisions to view.
575
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
576
a list of the same tuples.
578
if (exclude_common_ancestry and start_rev_id == end_rev_id):
579
raise errors.BzrCommandError(gettext(
580
'--exclude-common-ancestry requires two different revisions'))
581
if direction not in ('reverse', 'forward'):
582
raise ValueError(gettext('invalid direction %r') % direction)
583
br_rev_id = branch.last_revision()
584
if br_rev_id == _mod_revision.NULL_REVISION:
587
if (end_rev_id and start_rev_id == end_rev_id
588
and (not generate_merge_revisions
589
or not _has_merges(branch, end_rev_id))):
590
# If a single revision is requested, check we can handle it
591
return _generate_one_revision(branch, end_rev_id, br_rev_id,
593
if not generate_merge_revisions:
595
# If we only want to see linear revisions, we can iterate ...
596
iter_revs = _linear_view_revisions(
597
branch, start_rev_id, end_rev_id,
598
exclude_common_ancestry=exclude_common_ancestry)
599
# If a start limit was given and it's not obviously an
600
# ancestor of the end limit, check it before outputting anything
601
if (direction == 'forward'
602
or (start_rev_id and not _is_obvious_ancestor(
603
branch, start_rev_id, end_rev_id))):
604
iter_revs = list(iter_revs)
605
if direction == 'forward':
606
iter_revs = reversed(iter_revs)
608
except _StartNotLinearAncestor:
609
# Switch to the slower implementation that may be able to find a
610
# non-obvious ancestor out of the left-hand history.
612
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
613
direction, delayed_graph_generation,
614
exclude_common_ancestry)
615
if direction == 'forward':
616
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
620
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
621
if rev_id == br_rev_id:
623
return [(br_rev_id, br_revno, 0)]
625
revno_str = _compute_revno_str(branch, rev_id)
626
return [(rev_id, revno_str, 0)]
629
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
630
delayed_graph_generation,
631
exclude_common_ancestry=False):
632
# On large trees, generating the merge graph can take 30-60 seconds
633
# so we delay doing it until a merge is detected, incrementally
634
# returning initial (non-merge) revisions while we can.
636
# The above is only true for old formats (<= 0.92), for newer formats, a
637
# couple of seconds only should be needed to load the whole graph and the
638
# other graph operations needed are even faster than that -- vila 100201
639
initial_revisions = []
640
if delayed_graph_generation:
642
for rev_id, revno, depth in _linear_view_revisions(
643
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
644
if _has_merges(branch, rev_id):
645
# The end_rev_id can be nested down somewhere. We need an
646
# explicit ancestry check. There is an ambiguity here as we
647
# may not raise _StartNotLinearAncestor for a revision that
648
# is an ancestor but not a *linear* one. But since we have
649
# loaded the graph to do the check (or calculate a dotted
650
# revno), we may as well accept to show the log... We need
651
# the check only if start_rev_id is not None as all
652
# revisions have _mod_revision.NULL_REVISION as an ancestor
654
graph = branch.repository.get_graph()
655
if (start_rev_id is not None
656
and not graph.is_ancestor(start_rev_id, end_rev_id)):
657
raise _StartNotLinearAncestor()
658
# Since we collected the revisions so far, we need to
663
initial_revisions.append((rev_id, revno, depth))
665
# No merged revisions found
666
return initial_revisions
667
except _StartNotLinearAncestor:
668
# A merge was never detected so the lower revision limit can't
669
# be nested down somewhere
670
raise errors.BzrCommandError(gettext('Start revision not found in'
671
' history of end revision.'))
673
# We exit the loop above because we encounter a revision with merges, from
674
# this revision, we need to switch to _graph_view_revisions.
676
# A log including nested merges is required. If the direction is reverse,
677
# we rebase the initial merge depths so that the development line is
678
# shown naturally, i.e. just like it is for linear logging. We can easily
679
# make forward the exact opposite display, but showing the merge revisions
680
# indented at the end seems slightly nicer in that case.
681
view_revisions = itertools.chain(iter(initial_revisions),
682
_graph_view_revisions(branch, start_rev_id, end_rev_id,
683
rebase_initial_depths=(direction == 'reverse'),
684
exclude_common_ancestry=exclude_common_ancestry))
685
return view_revisions
688
def _has_merges(branch, rev_id):
689
"""Does a revision have multiple parents or not?"""
690
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
691
return len(parents) > 1
694
def _compute_revno_str(branch, rev_id):
695
"""Compute the revno string from a rev_id.
697
:return: The revno string, or None if the revision is not in the supplied
701
revno = branch.revision_id_to_dotted_revno(rev_id)
702
except errors.NoSuchRevision:
703
# The revision must be outside of this branch
706
return '.'.join(str(n) for n in revno)
709
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
710
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
711
if start_rev_id and end_rev_id:
713
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
714
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
715
except errors.NoSuchRevision:
716
# one or both is not in the branch; not obvious
718
if len(start_dotted) == 1 and len(end_dotted) == 1:
720
return start_dotted[0] <= end_dotted[0]
721
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
722
start_dotted[0:1] == end_dotted[0:1]):
723
# both on same development line
724
return start_dotted[2] <= end_dotted[2]
728
# if either start or end is not specified then we use either the first or
729
# the last revision and *they* are obvious ancestors.
733
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
734
exclude_common_ancestry=False):
735
"""Calculate a sequence of revisions to view, newest to oldest.
737
:param start_rev_id: the lower revision-id
738
:param end_rev_id: the upper revision-id
739
:param exclude_common_ancestry: Whether the start_rev_id should be part of
740
the iterated revisions.
741
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
742
dotted_revno will be None for ghosts
743
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
744
is not found walking the left-hand history
746
repo = branch.repository
747
graph = repo.get_graph()
748
if start_rev_id is None and end_rev_id is None:
750
br_revno, br_rev_id = branch.last_revision_info()
751
except errors.GhostRevisionsHaveNoRevno:
752
br_rev_id = branch.last_revision()
756
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
757
(_mod_revision.NULL_REVISION,))
760
revision_id = next(graph_iter)
761
except errors.RevisionNotPresent as e:
763
yield e.revision_id, None, None
766
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
767
if cur_revno is not None:
770
br_rev_id = branch.last_revision()
771
if end_rev_id is None:
772
end_rev_id = br_rev_id
773
found_start = start_rev_id is None
774
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
775
(_mod_revision.NULL_REVISION,))
778
revision_id = next(graph_iter)
779
except StopIteration:
781
except errors.RevisionNotPresent as e:
783
yield e.revision_id, None, None
786
revno_str = _compute_revno_str(branch, revision_id)
787
if not found_start and revision_id == start_rev_id:
788
if not exclude_common_ancestry:
789
yield revision_id, revno_str, 0
793
yield revision_id, revno_str, 0
795
raise _StartNotLinearAncestor()
798
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
799
rebase_initial_depths=True,
800
exclude_common_ancestry=False):
801
"""Calculate revisions to view including merges, newest to oldest.
803
:param branch: the branch
804
:param start_rev_id: the lower revision-id
805
:param end_rev_id: the upper revision-id
806
:param rebase_initial_depth: should depths be rebased until a mainline
808
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
810
if exclude_common_ancestry:
811
stop_rule = 'with-merges-without-common-ancestry'
813
stop_rule = 'with-merges'
814
view_revisions = branch.iter_merge_sorted_revisions(
815
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
817
if not rebase_initial_depths:
818
for (rev_id, merge_depth, revno, end_of_merge
820
yield rev_id, '.'.join(map(str, revno)), merge_depth
822
# We're following a development line starting at a merged revision.
823
# We need to adjust depths down by the initial depth until we find
824
# a depth less than it. Then we use that depth as the adjustment.
825
# If and when we reach the mainline, depth adjustment ends.
826
depth_adjustment = None
827
for (rev_id, merge_depth, revno, end_of_merge
829
if depth_adjustment is None:
830
depth_adjustment = merge_depth
832
if merge_depth < depth_adjustment:
833
# From now on we reduce the depth adjustement, this can be
834
# surprising for users. The alternative requires two passes
835
# which breaks the fast display of the first revision
837
depth_adjustment = merge_depth
838
merge_depth -= depth_adjustment
839
yield rev_id, '.'.join(map(str, revno)), merge_depth
842
def _rebase_merge_depth(view_revisions):
843
"""Adjust depths upwards so the top level is 0."""
844
# If either the first or last revision have a merge_depth of 0, we're done
845
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
846
min_depth = min([d for r, n, d in view_revisions])
848
view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
849
return view_revisions
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if isinstance(view_revisions, list):
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            # The delta filter is the only adapter needing the extra args.
            log_rev_iterator = adapter(branch, generate_delta,
                                       search, log_rev_iterator, file_ids,
                                       direction)
        else:
            log_rev_iterator = adapter(branch, generate_delta,
                                       search, log_rev_iterator)
    return log_rev_iterator
891
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
892
"""Create a filtered iterator of log_rev_iterator matching on a regex.
894
:param branch: The branch being logged.
895
:param generate_delta: Whether to generate a delta for each revision.
896
:param match: A dictionary with properties as keys and lists of strings
897
as values. To match, a revision may match any of the supplied strings
898
within a single property but must match at least one string for each
900
:param log_rev_iterator: An input iterator containing all revisions that
901
could be displayed, in lists.
902
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
906
return log_rev_iterator
907
searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
908
for k, v in match.items()]
909
return _filter_re(searchRE, log_rev_iterator)
def _filter_re(searchRE, log_rev_iterator):
    """Yield only the revisions from each batch that satisfy searchRE.

    Batches that end up empty after filtering are dropped entirely.
    """
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs
918
def _match_filter(searchRE, rev):
920
'message': (rev.message,),
921
'committer': (rev.committer,),
922
'author': (rev.get_apparent_authors()),
923
'bugs': list(rev.iter_bugs())
925
strings[''] = [item for inner_list in strings.values()
926
for item in inner_list]
927
for (k, v) in searchRE:
928
if k in strings and not _match_any_filter(strings[k], v):
932
def _match_any_filter(strings, res):
933
return any(re.search(s) for re in res for s in strings)
935
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
936
fileids=None, direction='reverse'):
937
"""Add revision deltas to a log iterator if needed.
939
:param branch: The branch being logged.
940
:param generate_delta: Whether to generate a delta for each revision.
941
Permitted values are None, 'full' and 'partial'.
942
:param search: A user text search string.
943
:param log_rev_iterator: An input iterator containing all revisions that
944
could be displayed, in lists.
945
:param fileids: If non empty, only revisions matching one or more of
946
the file-ids are to be kept.
947
:param direction: the direction in which view_revisions is sorted
948
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
951
if not generate_delta and not fileids:
952
return log_rev_iterator
953
return _generate_deltas(branch.repository, log_rev_iterator,
954
generate_delta, fileids, direction)
957
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
959
"""Create deltas for each batch of revisions in log_rev_iterator.
961
If we're only generating deltas for the sake of filtering against
962
file-ids, we stop generating deltas once all file-ids reach the
963
appropriate life-cycle point. If we're receiving data newest to
964
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
966
check_fileids = fileids is not None and len(fileids) > 0
968
fileid_set = set(fileids)
969
if direction == 'reverse':
975
for revs in log_rev_iterator:
976
# If we were matching against fileids and we've run out,
977
# there's nothing left to do
978
if check_fileids and not fileid_set:
980
revisions = [rev[1] for rev in revs]
982
if delta_type == 'full' and not check_fileids:
983
deltas = repository.get_deltas_for_revisions(revisions)
984
for rev, delta in zip(revs, deltas):
985
new_revs.append((rev[0], rev[1], delta))
987
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
988
for rev, delta in zip(revs, deltas):
990
if delta is None or not delta.has_changed():
993
_update_fileids(delta, fileid_set, stop_on)
994
if delta_type is None:
996
elif delta_type == 'full':
997
# If the file matches all the time, rebuilding
998
# a full delta like this in addition to a partial
999
# one could be slow. However, it's likely that
1000
# most revisions won't get this far, making it
1001
# faster to filter on the partial deltas and
1002
# build the occasional full delta than always
1003
# building full deltas and filtering those.
1005
delta = repository.get_revision_delta(rev_id)
1006
new_revs.append((rev[0], rev[1], delta))
1010
def _update_fileids(delta, fileids, stop_on):
1011
"""Update the set of file-ids to search based on file lifecycle events.
1013
:param fileids: a set of fileids to update
1014
:param stop_on: either 'add' or 'remove' - take file-ids out of the
1015
fileids set once their add or remove entry is detected respectively
1017
if stop_on == 'add':
1018
for item in delta.added:
1019
if item[1] in fileids:
1020
fileids.remove(item[1])
1021
elif stop_on == 'delete':
1022
for item in delta.removed:
1023
if item[1] in fileids:
1024
fileids.remove(item[1])
1027
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = dict(repository.iter_revisions(revision_ids))
        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]
1046
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1047
"""Group up a single large batch into smaller ones.
1049
:param branch: The branch being logged.
1050
:param generate_delta: Whether to generate a delta for each revision.
1051
:param search: A user text search string.
1052
:param log_rev_iterator: An input iterator containing all revisions that
1053
could be displayed, in lists.
1054
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1058
for batch in log_rev_iterator:
1061
step = [detail for _, detail in zip(range(num), batch)]
1065
num = min(int(num * 1.5), 200)
1068
def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged, as a
        RevisionInfo.
    :param end_revision: The last revision to be logged, as a RevisionInfo.
    :return: (start_rev_id, end_rev_id) tuple.
    :raises TypeError: if either limit is not a RevisionInfo.
    :raises errors.BzrCommandError: if revision 0 is requested or the range
        is inverted.
    """
    start_rev_id = None
    start_revno = None
    if start_revision is not None:
        if not isinstance(start_revision, revisionspec.RevisionInfo):
            raise TypeError(start_revision)
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno
    if start_revno is None:
        start_revno = 1

    end_rev_id = None
    end_revno = None
    if end_revision is not None:
        if not isinstance(end_revision, revisionspec.RevisionInfo):
            # Was `raise TypeError(start_revision)` - report the value that
            # actually failed the check.
            raise TypeError(end_revision)
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno
    if end_revno is None:
        try:
            end_revno = branch.revno()
        except errors.GhostRevisionsHaveNoRevno:
            end_revno = None

    if branch.last_revision() != _mod_revision.NULL_REVISION:
        if (start_rev_id == _mod_revision.NULL_REVISION
                or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
        if end_revno is not None and start_revno > end_revno:
            raise errors.BzrCommandError(gettext("Start revision must be "
                                                 "older than the end revision."))
    return (start_rev_id, end_rev_id)
1115
def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
            or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
                                             "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id
1198
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1199
include_merges=True):
1200
r"""Return the list of revision ids which touch a given file id.
1202
The function filters view_revisions and returns a subset.
1203
This includes the revisions which directly change the file id,
1204
and the revisions which merge these changes. So if the
1217
And 'C' changes a file, then both C and D will be returned. F will not be
1218
returned even though it brings the changes to C into the branch starting
1219
with E. (Note that if we were using F as the tip instead of G, then we
1222
This will also be restricted based on a subset of the mainline.
1224
:param branch: The branch where we can get text revision information.
1226
:param file_id: Filter out revisions that do not touch file_id.
1228
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1229
tuples. This is the list of revisions which will be filtered. It is
1230
assumed that view_revisions is in merge_sort order (i.e. newest
1233
:param include_merges: include merge revisions in the result or not
1235
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1237
# Lookup all possible text keys to determine which ones actually modified
1239
graph = branch.repository.get_file_graph()
1240
get_parent_map = graph.get_parent_map
1241
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1243
# Looking up keys in batches of 1000 can cut the time in half, as well as
1244
# memory consumption. GraphIndex *does* like to look for a few keys in
1245
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1246
# TODO: This code needs to be re-evaluated periodically as we tune the
1247
# indexing layer. We might consider passing in hints as to the known
1248
# access pattern (sparse/clustered, high success rate/low success
1249
# rate). This particular access is clustered with a low success rate.
1250
modified_text_revisions = set()
1252
for start in range(0, len(text_keys), chunk_size):
1253
next_keys = text_keys[start:start + chunk_size]
1254
# Only keep the revision_id portion of the key
1255
modified_text_revisions.update(
1256
[k[1] for k in get_parent_map(next_keys)])
1257
del text_keys, next_keys
1260
# Track what revisions will merge the current revision, replace entries
1261
# with 'None' when they have been added to result
1262
current_merge_stack = [None]
1263
for info in view_revisions:
1264
rev_id, revno, depth = info
1265
if depth == len(current_merge_stack):
1266
current_merge_stack.append(info)
1268
del current_merge_stack[depth + 1:]
1269
current_merge_stack[-1] = info
1271
if rev_id in modified_text_revisions:
1272
# This needs to be logged, along with the extra revisions
1273
for idx in range(len(current_merge_stack)):
1274
node = current_merge_stack[idx]
1275
if node is not None:
1276
if include_merges or node[2] == 0:
1278
current_merge_stack[idx] = None
1282
def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for
    this, but it looks much nicer.

    :param merge_sorted_revisions: a list of (rev_id, revno, depth) tuples
        in merge-sort order (newest first).
    :param _depth: internal recursion parameter - the depth being reversed.
    :return: the revisions reversed group-wise by depth.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
1313
class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        # The Revision object (or None).
        self.rev = rev
        # Keep None as None; formatters check `revno is None`, so it must
        # not be stringified into 'None'.
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature
1335
class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """
    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
             non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
          or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
          let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
          log or not.
        :param author_list_handler: callable generating a list of
          authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug #328007
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's better
            # for code that expects to get diffs to pass in the exact file
            # stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2  # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            # Formatters without merge support only ever show the mainline.
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(
                "Use --include-merged or -n0 to see merged revisions.\n")

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        # Prefer the human-readable name; fall back to the email address.
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of a author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)

        names = author_list_handler(rev)

        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address
        if sep is not None:
            names = sep.join(names)

        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            lines.extend(self._format_properties(handler(revision)))
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision revision ids always contain :
        if not ":" in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))
1553
# Separator between revisions in long format
1554
_LONG_SEP = '-' * 60
1557
class LongLogFormatter(LogFormatter):
    """Detailed, multi-line log formatter (one block per revision)."""

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True
    supports_signatures = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        # Pick the date formatting strategy once, up front.
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
            rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = '    ' * revision.merge_depth
        lines = [_LONG_SEP]
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        # Only show an author line when it differs from the committer.
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        if revision.signature is not None:
            lines.append('signature: ' + revision.signature)

        lines.append('message:')
        if not revision.rev.message:
            lines.append('  (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append('  %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'
1641
class ShortLogFormatter(LogFormatter):
    """Compact log formatter: one header line plus message per revision."""

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = '    ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno or "", self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent + offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format == 1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, '      ')
        to_file.write('\n')
1706
class LineLogFormatter(LogFormatter):
    """One-line-per-revision log formatter, truncated to terminal width."""

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers counts from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            # Integer division: a float here would break truncate()'s slice
            # under Python 3.
            out.append(self.truncate(
                self.short_author(rev), (max_chars + 3) // 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(tags))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1771
class GnuChangelogLogFormatter(LogFormatter):
    """Log formatter emitting GNU ChangeLog style entries."""

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        # GNU ChangeLog convention: two spaces between name and <email>,
        # and between the date and the name.
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')
1808
def line_log(rev, max_chars):
    """Render a single revision as a one-line log string.

    :param rev: the revision object to format.
    :param max_chars: maximum length of the resulting string.
    """
    formatter = LineLogFormatter(None)
    return formatter.log_string(None, rev, max_chars)
1813
class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        :raises KeyError: if no formatter is registered under ``name``.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        """Return the formatter class selected by the branch's config."""
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))
1829
log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')
1842
def register_formatter(name, formatter):
    """Register a log formatter class under the given name."""
    log_formatter_registry.register(name, formatter)
1846
def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.

    :raises errors.BzrCommandError: if ``name`` is not a registered formatter.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(
            gettext("unknown log formatter: %r") % name)
1858
def author_list_all(rev):
    """Return a copy of the full list of apparent authors of ``rev``."""
    return rev.get_apparent_authors()[:]
1862
def author_list_first(rev):
    """Return only the first apparent author of ``rev`` (or [] if none)."""
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []
1870
def author_list_committer(rev):
    """Return the committer of ``rev`` as a single-element list."""
    return [rev.committer]
1874
author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')
author_list_registry.register('first', author_list_first,
                              'The first author')
author_list_registry.register('committer', author_list_committer,
                              'The committer')
1886
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    :param log_format: registry name of the formatter to use.
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    # TODO: It might be nice to do something like show_log
    # and show the merged entries. But since this is the
    # removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh),
                 search=None)
1940
def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    :return: (old_history, new_history), each oldest-first, containing only
        the revisions unique to that side.
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    # Walk both ancestries in lockstep until they meet (or both run out).
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        # Drop the common ancestor and everything before it.
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
1990
def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno)
2021
def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history,
        oldest first.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    # history[0] is the oldest revision, so it carries start_revno; count
    # upward from there (numbering from last_revno would give the oldest
    # revision the tip's revno and overshoot for longer histories).
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + start_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    # Imported here to avoid a circular import at module load time.
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
                                                       "log")
    if relpaths in ([], [u'']):
        # No file paths were given - log the whole branch.
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        # No revision range given: look the paths up in the working tree,
        # falling back to the basis tree when there is no working tree.
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


def _get_kind_for_file_id(tree, path, file_id):
2132
"""Return the kind of a file-id or None if it doesn't exist."""
2133
if file_id is not None:
2134
return tree.kind(path, file_id)
2139
# Registry of callables that map a revision to extra display properties.
properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    """Return a display property summarising the bugs fixed by a revision.

    :param revision: A revision whose ``properties`` dict may contain a
        'bugs' entry of newline-separated "URL status" pairs.
    :return: A dict mapping a (translated) 'fixes bug'/'fixes bugs' label
        to a space-separated list of fixed bug URLs, or an empty dict when
        the revision fixed no bugs.
    """
    if 'bugs' in revision.properties:
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        # Only entries explicitly marked 'fixed' are reported.
        fixed_bug_urls = [row[0] for row in bug_rows
                          if len(row) > 1 and row[1] == 'fixed']
        if fixed_bug_urls:
            return {ngettext('fixes bug', 'fixes bugs',
                             len(fixed_bug_urls)): ' '.join(fixed_bug_urls)}
    # Handlers must always return a dict, even when nothing applies.
    return {}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
# adapters through which the revision ids to be logged are filtered. When
# log is called, the log_rev_iterator is adapted through each of these
# factory methods. Plugins are welcome to mutate this list in any way they
# like - as long as the overall behaviour is preserved. At this point there
# is no extensible mechanism for getting parameters to each factory method,
# and until there is this won't be considered a stable api.
2167
# read revision objects
2168
_make_revision_objects,
2169
# filter on log messages
2170
_make_search_filter,
2171
# generate deltas for things we will show