# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats.  Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions.  The range
can be given as date/times, which are reduced to revisions before
calling in here.

In verbose mode we show a summary of what changed in each particular
revision.  Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision.  So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""
from __future__ import absolute_import
56
from warnings import (
60
from .lazy_import import lazy_import
61
lazy_import(globals(), """
68
repository as _mod_repository,
69
revision as _mod_revision,
72
from breezy.i18n import gettext, ngettext
81
from .osutils import (
83
format_date_with_offset_in_original_timezone,
84
get_diff_header_encoding,
85
get_terminal_encoding,
93
from .tree import find_previous_path
96
def find_touching_revisions(repository, last_revision, last_tree, last_path):
97
"""Yield a description of revisions which affect the file_id.
99
Each returned element is (revno, revision_id, description)
101
This is the list of revisions where the file is either added,
102
modified, renamed or deleted.
104
TODO: Perhaps some way to limit this to only particular revisions,
105
or to traverse a non-mainline set of revisions?
107
last_verifier = last_tree.get_file_verifier(last_path)
108
graph = repository.get_graph()
109
history = list(graph.iter_lefthand_ancestry(last_revision, []))
111
for revision_id in history:
112
this_tree = repository.revision_tree(revision_id)
113
this_path = find_previous_path(last_tree, this_tree, last_path)
115
# now we know how it was last time, and how it is in this revision.
116
# are those two states effectively the same or not?
117
if this_path is not None and last_path is None:
118
yield revno, revision_id, "deleted " + this_path
119
this_verifier = this_tree.get_file_verifier(this_path)
120
elif this_path is None and last_path is not None:
121
yield revno, revision_id, "added " + last_path
122
elif this_path != last_path:
123
yield revno, revision_id, ("renamed %s => %s" % (this_path, last_path))
124
this_verifier = this_tree.get_file_verifier(this_path)
126
this_verifier = this_tree.get_file_verifier(this_path)
127
if (this_verifier != last_verifier):
128
yield revno, revision_id, "modified " + this_path
130
last_verifier = this_verifier
131
last_path = this_path
132
last_tree = this_tree
133
if last_path is None:
140
specific_fileid=None,
149
"""Write out human-readable log of commits to this branch.
151
This function is being retained for backwards compatibility but
152
should not be extended with new parameters. Use the new Logger class
153
instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
154
make_log_request_dict function.
156
:param lf: The LogFormatter object showing the output.
158
:param specific_fileid: If not None, list only the commits affecting the
159
specified file, rather than all commits.
161
:param verbose: If True show added/changed/deleted/renamed files.
163
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
166
:param start_revision: If not None, only show revisions >= start_revision
168
:param end_revision: If not None, only show revisions <= end_revision
170
:param search: If not None, only show revisions with matching commit
173
:param limit: If set, shows only 'limit' revisions, all revisions are shown
176
:param show_diff: If True, output a diff after each revision.
178
:param match: Dictionary of search lists to use when matching revision
181
# Convert old-style parameters to new-style parameters
182
if specific_fileid is not None:
183
file_ids = [specific_fileid]
188
delta_type = 'partial'
195
diff_type = 'partial'
201
if isinstance(start_revision, int):
203
start_revision = revisionspec.RevisionInfo(branch, start_revision)
204
except errors.NoSuchRevision:
205
raise errors.InvalidRevisionNumber(start_revision)
207
if isinstance(end_revision, int):
209
end_revision = revisionspec.RevisionInfo(branch, end_revision)
210
except errors.NoSuchRevision:
211
raise errors.InvalidRevisionNumber(end_revision)
213
if end_revision is not None and end_revision.revno == 0:
214
raise errors.InvalidRevisionNumber(end_revision.revno)
216
# Build the request and execute it
217
rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
218
start_revision=start_revision, end_revision=end_revision,
219
limit=limit, message_search=search,
220
delta_type=delta_type, diff_type=diff_type)
221
Logger(branch, rqst).show(lf)
224
# Note: This needs to be kept in sync with the defaults in
225
# make_log_request_dict() below
226
_DEFAULT_REQUEST_PARAMS = {
227
'direction': 'reverse',
229
'generate_tags': True,
230
'exclude_common_ancestry': False,
231
'_match_using_deltas': True,
235
def make_log_request_dict(direction='reverse', specific_fileids=None,
236
start_revision=None, end_revision=None, limit=None,
237
message_search=None, levels=None, generate_tags=True,
239
diff_type=None, _match_using_deltas=True,
240
exclude_common_ancestry=False, match=None,
241
signature=False, omit_merges=False,
243
"""Convenience function for making a logging request dictionary.
245
Using this function may make code slightly safer by ensuring
246
parameters have the correct names. It also provides a reference
247
point for documenting the supported parameters.
249
:param direction: 'reverse' (default) is latest to earliest;
250
'forward' is earliest to latest.
252
:param specific_fileids: If not None, only include revisions
253
affecting the specified files, rather than all revisions.
255
:param start_revision: If not None, only generate
256
revisions >= start_revision
258
:param end_revision: If not None, only generate
259
revisions <= end_revision
261
:param limit: If set, generate only 'limit' revisions, all revisions
262
are shown if None or 0.
264
:param message_search: If not None, only include revisions with
265
matching commit messages
267
:param levels: the number of levels of revisions to
268
generate; 1 for just the mainline; 0 for all levels, or None for
271
:param generate_tags: If True, include tags for matched revisions.
273
:param delta_type: Either 'full', 'partial' or None.
274
'full' means generate the complete delta - adds/deletes/modifies/etc;
275
'partial' means filter the delta using specific_fileids;
276
None means do not generate any delta.
278
:param diff_type: Either 'full', 'partial' or None.
279
'full' means generate the complete diff - adds/deletes/modifies/etc;
280
'partial' means filter the diff using specific_fileids;
281
None means do not generate any diff.
283
:param _match_using_deltas: a private parameter controlling the
284
algorithm used for matching specific_fileids. This parameter
285
may be removed in the future so breezy client code should NOT
288
:param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
289
range operator or as a graph difference.
291
:param signature: show digital signature information
293
:param match: Dictionary of list of search strings to use when filtering
294
revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
295
the empty string to match any of the preceding properties.
297
:param omit_merges: If True, commits with more than one parent are
301
# Take care of old style message_search parameter
304
if 'message' in match:
305
match['message'].append(message_search)
307
match['message'] = [message_search]
309
match={ 'message': [message_search] }
311
'direction': direction,
312
'specific_fileids': specific_fileids,
313
'start_revision': start_revision,
314
'end_revision': end_revision,
317
'generate_tags': generate_tags,
318
'delta_type': delta_type,
319
'diff_type': diff_type,
320
'exclude_common_ancestry': exclude_common_ancestry,
321
'signature': signature,
323
'omit_merges': omit_merges,
324
# Add 'private' attributes for features that may be deprecated
325
'_match_using_deltas': _match_using_deltas,
329
def _apply_log_request_defaults(rqst):
330
"""Apply default values to a request dictionary."""
331
result = _DEFAULT_REQUEST_PARAMS.copy()
337
def format_signature_validity(rev_id, branch):
338
"""get the signature validity
340
:param rev_id: revision id to validate
341
:param branch: branch of revision
342
:return: human readable string to print to log
344
from breezy import gpg
346
gpg_strategy = gpg.GPGStrategy(branch.get_config_stack())
347
result = branch.repository.verify_revision_signature(rev_id, gpg_strategy)
348
if result[0] == gpg.SIGNATURE_VALID:
349
return u"valid signature from {0}".format(result[1])
350
if result[0] == gpg.SIGNATURE_KEY_MISSING:
351
return "unknown key {0}".format(result[1])
352
if result[0] == gpg.SIGNATURE_NOT_VALID:
353
return "invalid signature!"
354
if result[0] == gpg.SIGNATURE_NOT_SIGNED:
355
return "no signature"
358
class LogGenerator(object):
359
"""A generator of log revisions."""
361
def iter_log_revisions(self):
362
"""Iterate over LogRevision objects.
364
:return: An iterator yielding LogRevision objects.
366
raise NotImplementedError(self.iter_log_revisions)
369
class Logger(object):
370
"""An object that generates, formats and displays a log."""
372
def __init__(self, branch, rqst):
375
:param branch: the branch to log
376
:param rqst: A dictionary specifying the query parameters.
377
See make_log_request_dict() for supported values.
380
self.rqst = _apply_log_request_defaults(rqst)
385
:param lf: The LogFormatter object to send the output to.
387
if not isinstance(lf, LogFormatter):
388
warn("not a LogFormatter instance: %r" % lf)
390
with self.branch.lock_read():
391
if getattr(lf, 'begin_log', None):
394
if getattr(lf, 'end_log', None):
397
def _show_body(self, lf):
398
"""Show the main log output.
400
Subclasses may wish to override this.
402
# Tweak the LogRequest based on what the LogFormatter can handle.
403
# (There's no point generating stuff if the formatter can't display it.)
405
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
406
# user didn't specify levels, use whatever the LF can handle:
407
rqst['levels'] = lf.get_levels()
409
if not getattr(lf, 'supports_tags', False):
410
rqst['generate_tags'] = False
411
if not getattr(lf, 'supports_delta', False):
412
rqst['delta_type'] = None
413
if not getattr(lf, 'supports_diff', False):
414
rqst['diff_type'] = None
415
if not getattr(lf, 'supports_signatures', False):
416
rqst['signature'] = False
418
# Find and print the interesting revisions
419
generator = self._generator_factory(self.branch, rqst)
421
for lr in generator.iter_log_revisions():
423
except errors.GhostRevisionUnusableHere:
424
raise errors.BzrCommandError(
425
gettext('Further revision history missing.'))
428
def _generator_factory(self, branch, rqst):
429
"""Make the LogGenerator object to use.
431
Subclasses may wish to override this.
433
return _DefaultLogGenerator(branch, rqst)
436
class _StartNotLinearAncestor(Exception):
437
"""Raised when a start revision is not found walking left-hand history."""
440
class _DefaultLogGenerator(LogGenerator):
441
"""The default generator of log revisions."""
443
def __init__(self, branch, rqst):
446
if rqst.get('generate_tags') and branch.supports_tags():
447
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
449
self.rev_tag_dict = {}
451
def iter_log_revisions(self):
452
"""Iterate over LogRevision objects.
454
:return: An iterator yielding LogRevision objects.
457
levels = rqst.get('levels')
458
limit = rqst.get('limit')
459
diff_type = rqst.get('diff_type')
460
show_signature = rqst.get('signature')
461
omit_merges = rqst.get('omit_merges')
463
revision_iterator = self._create_log_revision_iterator()
464
for revs in revision_iterator:
465
for (rev_id, revno, merge_depth), rev, delta in revs:
466
# 0 levels means show everything; merge_depth counts from 0
467
if levels != 0 and merge_depth >= levels:
469
if omit_merges and len(rev.parent_ids) > 1:
472
raise errors.GhostRevisionUnusableHere(rev_id)
473
if diff_type is None:
476
diff = self._format_diff(rev, rev_id, diff_type)
478
signature = format_signature_validity(rev_id, self.branch)
481
yield LogRevision(rev, revno, merge_depth, delta,
482
self.rev_tag_dict.get(rev_id), diff, signature)
485
if log_count >= limit:
488
def _format_diff(self, rev, rev_id, diff_type):
489
repo = self.branch.repository
490
if len(rev.parent_ids) == 0:
491
ancestor_id = _mod_revision.NULL_REVISION
493
ancestor_id = rev.parent_ids[0]
494
tree_1 = repo.revision_tree(ancestor_id)
495
tree_2 = repo.revision_tree(rev_id)
496
file_ids = self.rqst.get('specific_fileids')
497
if diff_type == 'partial' and file_ids is not None:
498
specific_files = [tree_2.id2path(id) for id in file_ids]
500
specific_files = None
502
path_encoding = get_diff_header_encoding()
503
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
504
new_label='', path_encoding=path_encoding)
507
def _create_log_revision_iterator(self):
508
"""Create a revision iterator for log.
510
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
513
self.start_rev_id, self.end_rev_id = _get_revision_limits(
514
self.branch, self.rqst.get('start_revision'),
515
self.rqst.get('end_revision'))
516
if self.rqst.get('_match_using_deltas'):
517
return self._log_revision_iterator_using_delta_matching()
519
# We're using the per-file-graph algorithm. This scales really
520
# well but only makes sense if there is a single file and it's
522
file_count = len(self.rqst.get('specific_fileids'))
524
raise BzrError("illegal LogRequest: must match-using-deltas "
525
"when logging %d files" % file_count)
526
return self._log_revision_iterator_using_per_file_graph()
528
def _log_revision_iterator_using_delta_matching(self):
529
# Get the base revisions, filtering by the revision range
531
generate_merge_revisions = rqst.get('levels') != 1
532
delayed_graph_generation = not rqst.get('specific_fileids') and (
533
rqst.get('limit') or self.start_rev_id or self.end_rev_id)
534
view_revisions = _calc_view_revisions(
535
self.branch, self.start_rev_id, self.end_rev_id,
536
rqst.get('direction'),
537
generate_merge_revisions=generate_merge_revisions,
538
delayed_graph_generation=delayed_graph_generation,
539
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
541
# Apply the other filters
542
return make_log_rev_iterator(self.branch, view_revisions,
543
rqst.get('delta_type'), rqst.get('match'),
544
file_ids=rqst.get('specific_fileids'),
545
direction=rqst.get('direction'))
547
def _log_revision_iterator_using_per_file_graph(self):
548
# Get the base revisions, filtering by the revision range.
549
# Note that we always generate the merge revisions because
550
# filter_revisions_touching_file_id() requires them ...
552
view_revisions = _calc_view_revisions(
553
self.branch, self.start_rev_id, self.end_rev_id,
554
rqst.get('direction'), generate_merge_revisions=True,
555
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
556
if not isinstance(view_revisions, list):
557
view_revisions = list(view_revisions)
558
view_revisions = _filter_revisions_touching_file_id(self.branch,
559
rqst.get('specific_fileids')[0], view_revisions,
560
include_merges=rqst.get('levels') != 1)
561
return make_log_rev_iterator(self.branch, view_revisions,
562
rqst.get('delta_type'), rqst.get('match'))
565
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
566
generate_merge_revisions,
567
delayed_graph_generation=False,
568
exclude_common_ancestry=False,
570
"""Calculate the revisions to view.
572
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
573
a list of the same tuples.
575
if (exclude_common_ancestry and start_rev_id == end_rev_id):
576
raise errors.BzrCommandError(gettext(
577
'--exclude-common-ancestry requires two different revisions'))
578
if direction not in ('reverse', 'forward'):
579
raise ValueError(gettext('invalid direction %r') % direction)
580
br_rev_id = branch.last_revision()
581
if br_rev_id == _mod_revision.NULL_REVISION:
584
if (end_rev_id and start_rev_id == end_rev_id
585
and (not generate_merge_revisions
586
or not _has_merges(branch, end_rev_id))):
587
# If a single revision is requested, check we can handle it
588
return _generate_one_revision(branch, end_rev_id, br_rev_id,
590
if not generate_merge_revisions:
592
# If we only want to see linear revisions, we can iterate ...
593
iter_revs = _linear_view_revisions(
594
branch, start_rev_id, end_rev_id,
595
exclude_common_ancestry=exclude_common_ancestry)
596
# If a start limit was given and it's not obviously an
597
# ancestor of the end limit, check it before outputting anything
598
if (direction == 'forward'
599
or (start_rev_id and not _is_obvious_ancestor(
600
branch, start_rev_id, end_rev_id))):
601
iter_revs = list(iter_revs)
602
if direction == 'forward':
603
iter_revs = reversed(iter_revs)
605
except _StartNotLinearAncestor:
606
# Switch to the slower implementation that may be able to find a
607
# non-obvious ancestor out of the left-hand history.
609
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
610
direction, delayed_graph_generation,
611
exclude_common_ancestry)
612
if direction == 'forward':
613
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
617
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
618
if rev_id == br_rev_id:
620
return [(br_rev_id, br_revno, 0)]
622
revno_str = _compute_revno_str(branch, rev_id)
623
return [(rev_id, revno_str, 0)]
626
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
627
delayed_graph_generation,
628
exclude_common_ancestry=False):
629
# On large trees, generating the merge graph can take 30-60 seconds
630
# so we delay doing it until a merge is detected, incrementally
631
# returning initial (non-merge) revisions while we can.
633
# The above is only true for old formats (<= 0.92), for newer formats, a
634
# couple of seconds only should be needed to load the whole graph and the
635
# other graph operations needed are even faster than that -- vila 100201
636
initial_revisions = []
637
if delayed_graph_generation:
639
for rev_id, revno, depth in _linear_view_revisions(
640
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
641
if _has_merges(branch, rev_id):
642
# The end_rev_id can be nested down somewhere. We need an
643
# explicit ancestry check. There is an ambiguity here as we
644
# may not raise _StartNotLinearAncestor for a revision that
645
# is an ancestor but not a *linear* one. But since we have
646
# loaded the graph to do the check (or calculate a dotted
647
# revno), we may as well accept to show the log... We need
648
# the check only if start_rev_id is not None as all
649
# revisions have _mod_revision.NULL_REVISION as an ancestor
651
graph = branch.repository.get_graph()
652
if (start_rev_id is not None
653
and not graph.is_ancestor(start_rev_id, end_rev_id)):
654
raise _StartNotLinearAncestor()
655
# Since we collected the revisions so far, we need to
660
initial_revisions.append((rev_id, revno, depth))
662
# No merged revisions found
663
return initial_revisions
664
except _StartNotLinearAncestor:
665
# A merge was never detected so the lower revision limit can't
666
# be nested down somewhere
667
raise errors.BzrCommandError(gettext('Start revision not found in'
668
' history of end revision.'))
670
# We exit the loop above because we encounter a revision with merges, from
671
# this revision, we need to switch to _graph_view_revisions.
673
# A log including nested merges is required. If the direction is reverse,
674
# we rebase the initial merge depths so that the development line is
675
# shown naturally, i.e. just like it is for linear logging. We can easily
676
# make forward the exact opposite display, but showing the merge revisions
677
# indented at the end seems slightly nicer in that case.
678
view_revisions = itertools.chain(iter(initial_revisions),
679
_graph_view_revisions(branch, start_rev_id, end_rev_id,
680
rebase_initial_depths=(direction == 'reverse'),
681
exclude_common_ancestry=exclude_common_ancestry))
682
return view_revisions
685
def _has_merges(branch, rev_id):
686
"""Does a revision have multiple parents or not?"""
687
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
688
return len(parents) > 1
691
def _compute_revno_str(branch, rev_id):
692
"""Compute the revno string from a rev_id.
694
:return: The revno string, or None if the revision is not in the supplied
698
revno = branch.revision_id_to_dotted_revno(rev_id)
699
except errors.NoSuchRevision:
700
# The revision must be outside of this branch
703
return '.'.join(str(n) for n in revno)
706
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
707
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
708
if start_rev_id and end_rev_id:
710
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
711
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
712
except errors.NoSuchRevision:
713
# one or both is not in the branch; not obvious
715
if len(start_dotted) == 1 and len(end_dotted) == 1:
717
return start_dotted[0] <= end_dotted[0]
718
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
719
start_dotted[0:1] == end_dotted[0:1]):
720
# both on same development line
721
return start_dotted[2] <= end_dotted[2]
725
# if either start or end is not specified then we use either the first or
726
# the last revision and *they* are obvious ancestors.
730
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
731
exclude_common_ancestry=False):
732
"""Calculate a sequence of revisions to view, newest to oldest.
734
:param start_rev_id: the lower revision-id
735
:param end_rev_id: the upper revision-id
736
:param exclude_common_ancestry: Whether the start_rev_id should be part of
737
the iterated revisions.
738
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
739
dotted_revno will be None for ghosts
740
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
741
is not found walking the left-hand history
743
repo = branch.repository
744
graph = repo.get_graph()
745
if start_rev_id is None and end_rev_id is None:
747
br_revno, br_rev_id = branch.last_revision_info()
748
except errors.GhostRevisionsHaveNoRevno:
749
br_rev_id = branch.last_revision()
753
graph_iter = graph.iter_lefthand_ancestry(br_rev_id,
754
(_mod_revision.NULL_REVISION,))
757
revision_id = next(graph_iter)
758
except errors.RevisionNotPresent as e:
760
yield e.revision_id, None, None
763
yield revision_id, str(cur_revno) if cur_revno is not None else None, 0
764
if cur_revno is not None:
767
br_rev_id = branch.last_revision()
768
if end_rev_id is None:
769
end_rev_id = br_rev_id
770
found_start = start_rev_id is None
771
graph_iter = graph.iter_lefthand_ancestry(end_rev_id,
772
(_mod_revision.NULL_REVISION,))
775
revision_id = next(graph_iter)
776
except StopIteration:
778
except errors.RevisionNotPresent as e:
780
yield e.revision_id, None, None
783
revno_str = _compute_revno_str(branch, revision_id)
784
if not found_start and revision_id == start_rev_id:
785
if not exclude_common_ancestry:
786
yield revision_id, revno_str, 0
790
yield revision_id, revno_str, 0
792
raise _StartNotLinearAncestor()
795
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
796
rebase_initial_depths=True,
797
exclude_common_ancestry=False):
798
"""Calculate revisions to view including merges, newest to oldest.
800
:param branch: the branch
801
:param start_rev_id: the lower revision-id
802
:param end_rev_id: the upper revision-id
803
:param rebase_initial_depth: should depths be rebased until a mainline
805
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
807
if exclude_common_ancestry:
808
stop_rule = 'with-merges-without-common-ancestry'
810
stop_rule = 'with-merges'
811
view_revisions = branch.iter_merge_sorted_revisions(
812
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
814
if not rebase_initial_depths:
815
for (rev_id, merge_depth, revno, end_of_merge
817
yield rev_id, '.'.join(map(str, revno)), merge_depth
819
# We're following a development line starting at a merged revision.
820
# We need to adjust depths down by the initial depth until we find
821
# a depth less than it. Then we use that depth as the adjustment.
822
# If and when we reach the mainline, depth adjustment ends.
823
depth_adjustment = None
824
for (rev_id, merge_depth, revno, end_of_merge
826
if depth_adjustment is None:
827
depth_adjustment = merge_depth
829
if merge_depth < depth_adjustment:
830
# From now on we reduce the depth adjustement, this can be
831
# surprising for users. The alternative requires two passes
832
# which breaks the fast display of the first revision
834
depth_adjustment = merge_depth
835
merge_depth -= depth_adjustment
836
yield rev_id, '.'.join(map(str, revno)), merge_depth
839
def _rebase_merge_depth(view_revisions):
840
"""Adjust depths upwards so the top level is 0."""
841
# If either the first or last revision have a merge_depth of 0, we're done
842
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
843
min_depth = min([d for r, n, d in view_revisions])
845
view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
846
return view_revisions
849
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
850
file_ids=None, direction='reverse'):
851
"""Create a revision iterator for log.
853
:param branch: The branch being logged.
854
:param view_revisions: The revisions being viewed.
855
:param generate_delta: Whether to generate a delta for each revision.
856
Permitted values are None, 'full' and 'partial'.
857
:param search: A user text search string.
858
:param file_ids: If non empty, only revisions matching one or more of
859
the file-ids are to be kept.
860
:param direction: the direction in which view_revisions is sorted
861
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
864
# Convert view_revisions into (view, None, None) groups to fit with
865
# the standard interface here.
866
if isinstance(view_revisions, list):
867
# A single batch conversion is faster than many incremental ones.
868
# As we have all the data, do a batch conversion.
869
nones = [None] * len(view_revisions)
870
log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
873
for view in view_revisions:
874
yield (view, None, None)
875
log_rev_iterator = iter([_convert()])
876
for adapter in log_adapters:
877
# It would be nicer if log adapters were first class objects
878
# with custom parameters. This will do for now. IGC 20090127
879
if adapter == _make_delta_filter:
880
log_rev_iterator = adapter(branch, generate_delta,
881
search, log_rev_iterator, file_ids, direction)
883
log_rev_iterator = adapter(branch, generate_delta,
884
search, log_rev_iterator)
885
return log_rev_iterator
888
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
889
"""Create a filtered iterator of log_rev_iterator matching on a regex.
891
:param branch: The branch being logged.
892
:param generate_delta: Whether to generate a delta for each revision.
893
:param match: A dictionary with properties as keys and lists of strings
894
as values. To match, a revision may match any of the supplied strings
895
within a single property but must match at least one string for each
897
:param log_rev_iterator: An input iterator containing all revisions that
898
could be displayed, in lists.
899
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
903
return log_rev_iterator
904
searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
905
for k, v in match.items()]
906
return _filter_re(searchRE, log_rev_iterator)
909
def _filter_re(searchRE, log_rev_iterator):
910
for revs in log_rev_iterator:
911
new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
915
def _match_filter(searchRE, rev):
917
'message': (rev.message,),
918
'committer': (rev.committer,),
919
'author': (rev.get_apparent_authors()),
920
'bugs': list(rev.iter_bugs())
922
strings[''] = [item for inner_list in strings.values()
923
for item in inner_list]
924
for (k, v) in searchRE:
925
if k in strings and not _match_any_filter(strings[k], v):
929
def _match_any_filter(strings, res):
930
return any(re.search(s) for re in res for s in strings)
932
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
933
fileids=None, direction='reverse'):
934
"""Add revision deltas to a log iterator if needed.
936
:param branch: The branch being logged.
937
:param generate_delta: Whether to generate a delta for each revision.
938
Permitted values are None, 'full' and 'partial'.
939
:param search: A user text search string.
940
:param log_rev_iterator: An input iterator containing all revisions that
941
could be displayed, in lists.
942
:param fileids: If non empty, only revisions matching one or more of
943
the file-ids are to be kept.
944
:param direction: the direction in which view_revisions is sorted
945
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
948
if not generate_delta and not fileids:
949
return log_rev_iterator
950
return _generate_deltas(branch.repository, log_rev_iterator,
951
generate_delta, fileids, direction)
954
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
956
"""Create deltas for each batch of revisions in log_rev_iterator.
958
If we're only generating deltas for the sake of filtering against
959
file-ids, we stop generating deltas once all file-ids reach the
960
appropriate life-cycle point. If we're receiving data newest to
961
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
963
check_fileids = fileids is not None and len(fileids) > 0
965
fileid_set = set(fileids)
966
if direction == 'reverse':
972
for revs in log_rev_iterator:
973
# If we were matching against fileids and we've run out,
974
# there's nothing left to do
975
if check_fileids and not fileid_set:
977
revisions = [rev[1] for rev in revs]
979
if delta_type == 'full' and not check_fileids:
980
deltas = repository.get_deltas_for_revisions(revisions)
981
for rev, delta in zip(revs, deltas):
982
new_revs.append((rev[0], rev[1], delta))
984
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
985
for rev, delta in zip(revs, deltas):
987
if delta is None or not delta.has_changed():
990
_update_fileids(delta, fileid_set, stop_on)
991
if delta_type is None:
993
elif delta_type == 'full':
994
# If the file matches all the time, rebuilding
995
# a full delta like this in addition to a partial
996
# one could be slow. However, it's likely that
997
# most revisions won't get this far, making it
998
# faster to filter on the partial deltas and
999
# build the occasional full delta than always
1000
# building full deltas and filtering those.
1002
delta = repository.get_revision_delta(rev_id)
1003
new_revs.append((rev[0], rev[1], delta))
1007
def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param delta: a tree delta whose ``added``/``removed`` entries are
        examined; each entry is a tuple whose second element is the file-id
    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove'/'delete' - take file-ids out of
        the fileids set once their add or remove entry is detected
        respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on in ('remove', 'delete'):
        # The docstring documented 'remove' while the code only accepted
        # 'delete'; accept both spellings so the documented contract works
        # without breaking any existing caller that passes 'delete'.
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])


def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = dict(repository.iter_revisions(revision_ids))
        yield [(rev[0], revisions[rev[0][0]], rev[2]) for rev in revs]


def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    num = 9
    for batch in log_rev_iterator:
        batch = iter(batch)
        while True:
            step = [detail for _, detail in zip(range(num), batch)]
            if len(step) == 0:
                break
            yield step
            # Ramp the batch size up gradually so the first results are shown
            # quickly while later batches stay efficient (capped at 200).
            num = min(int(num * 1.5), 200)


def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged, as a
        RevisionInfo (anything else raises TypeError).
    :param end_revision: The last revision to be logged, as a
        RevisionInfo (anything else raises TypeError).
    :return: (start_rev_id, end_rev_id) tuple.
    """
    start_rev_id = None
    start_revno = None
    if start_revision is not None:
        if not isinstance(start_revision, revisionspec.RevisionInfo):
            raise TypeError(start_revision)
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno
    if start_revno is None:
        start_revno = 1

    end_rev_id = None
    end_revno = None
    if end_revision is not None:
        if not isinstance(end_revision, revisionspec.RevisionInfo):
            # Report the offending end_revision; this previously named
            # start_revision by copy-paste mistake.
            raise TypeError(end_revision)
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno
    if end_revno is None:
        try:
            end_revno = branch.revno()
        except errors.GhostRevisionsHaveNoRevno:
            # The branch tip is a ghost, so the end revno is unknowable.
            end_revno = None

    if branch.last_revision() != _mod_revision.NULL_REVISION:
        if (start_rev_id == _mod_revision.NULL_REVISION
                or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
        if end_revno is not None and start_revno > end_revno:
            raise errors.BzrCommandError(gettext("Start revision must be "
                                                 "older than the end revision."))
    return (start_rev_id, end_rev_id)


def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
            or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
                                             "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So for a revision
    graph where 'C' changes a file, both C and the revision that merges
    C back to the mainline (e.g. D) will be returned; a revision that
    merely brings those changes along on another branch (e.g. F) will not
    be returned even though it carries the changes to C into the branch
    starting with E.

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.

    :param file_id: Filter out revisions that do not touch file_id.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
        revision first).

    :param include_merges: include merge revisions in the result or not

    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    # the file.
    graph = branch.repository.get_file_graph()
    get_parent_map = graph.get_parent_map
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    next_keys = None
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    #       indexing layer. We might consider passing in hints as to the known
    #       access pattern (sparse/clustered, high success rate/low success
    #       rate). This particular access is clustered with a low success rate.
    modified_text_revisions = set()
    chunk_size = 1000
    for start in range(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    result = []
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in range(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        result.append(node)
                        current_merge_stack[idx] = None
    return result


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for
    this, but it looks much nicer.

    :param merge_sorted_revisions: list of (rev_id, revno, depth) tuples
    :param _depth: recursion depth; callers use the default of 0
    :return: the reversed list, with each nested depth reversed in place
        relative to its parent revision
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        self.rev = rev
        if revno is None:
            self.revno = None
        else:
            # Normalise revnos (ints or dotted tuples) to their string form.
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature


class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """
    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
            or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
            let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
            log or not
        :param author_list_handler: callable generating a list of
            authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug #328007
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's
            # better for code that expects to get diffs to pass in the exact
            # file stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2  # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            # Formatters without merge support always collapse to one level.
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(gettext(
                "Use --include-merged or -n0 to see merged revisions.\n"))

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        # Prefer the human-readable name; fall back to the email address.
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of a author list handler,
        as provided by the author list registry, using the ``who``
        argument.  That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)
        names = author_list_handler(rev)
        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address
        if sep is not None:
            names = sep.join(names)
        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            lines.extend(self._format_properties(handler(revision)))
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision revision ids always contain :
        if not ":" in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        # Render a {name: value} dict as 'name: value' lines.
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))


# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):
    """Detailed, multi-line formatter: one labelled field per line."""

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True
    supports_signatures = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        # Bind the date renderer once rather than testing show_timezone on
        # every revision.
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
            rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = '    ' * revision.merge_depth
        lines = []
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        if revision.signature is not None:
            lines.append('signature: ' + revision.signature)

        lines.append('message:')
        if not revision.rev.message:
            lines.append('  (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append('  %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'


class ShortLogFormatter(LogFormatter):
    """Compact formatter: revno, author, date, then the indented message."""

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = '    ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno or "", self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent + offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from breezy.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format == 1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, '      ')
        to_file.write('\n')


class LineLogFormatter(LogFormatter):
    """One-line-per-revision formatter, truncated to the terminal width."""

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers counts from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            # Use floor division: on Python 3 '/' yields a float here and
            # truncate() would then slice with a float, raising TypeError.
            out.append(self.truncate(
                self.short_author(rev), (max_chars + 3) // 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(tags))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)


class GnuChangelogLogFormatter(LogFormatter):
    """Formatter emitting GNU ChangeLog style entries."""

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        # GNU ChangeLog convention separates the name and email with two
        # spaces (the mangled source had collapsed these to one).
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')


def line_log(rev, max_chars):
    """Return a one-line summary of *rev*, truncated to *max_chars*."""
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct.  'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        """Return the formatter class selected by the branch configuration."""
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')


def register_formatter(name, formatter):
    """Register *formatter* under *name* in the global formatter registry."""
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)


def author_list_all(rev):
    """Return all apparent authors of *rev*, as a fresh list."""
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    """Return a list holding only the first apparent author of *rev*.

    An empty list is returned when the revision has no apparent authors.
    """
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    """Return the committer of *rev* as a single-element list."""
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')
author_list_registry.register('first', author_list_first,
                              'The first author')
author_list_registry.register('committer', author_list_committer,
                              'The committer')


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in range(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    ## TODO: It might be nice to do something like show_log
    ## and show the merged entries. But since this is the
    ## removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*'*60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i+1, 0, None)
            lf.log_revision(lr)
        to_file.write('*'*60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx+1,
                 end_revision=len(new_rh),
                 search=None)


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    return old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    # Walk both lefthand ancestries in lock-step until they meet, so neither
    # side is walked further back than necessary.
    while do_new or do_old:
        if do_new:
            try:
                new_revision = next(new_iter)
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = next(old_iter)
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        # Trim off the common ancestor and everything before it.
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history


def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*'*60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*'*60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno)


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history,
        oldest first.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        # history is oldest-first, so entry i carries revno start_revno + i
        # (numbering from last_revno would push every entry past the tip).
        lr = LogRevision(rev, i + start_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from breezy.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        # No revision range given: look in the working/basis tree first,
        # falling back to the very first revision for deleted files.
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, fp, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, fp, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


def _get_kind_for_file_id(tree, path, file_id):
2129
"""Return the kind of a file-id or None if it doesn't exist."""
2130
if file_id is not None:
2131
return tree.kind(path, file_id)
2136
# Registry of callables that turn revision properties into extra
# key/value pairs for log formatters to display.
properties_handler_registry = registry.Registry()


# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
2140
if 'bugs' in revision.properties:
2141
bug_lines = revision.properties['bugs'].split('\n')
2142
bug_rows = [line.split(' ', 1) for line in bug_lines]
2143
fixed_bug_urls = [row[0] for row in bug_rows if
2144
len(row) > 1 and row[1] == 'fixed']
2147
return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
2148
' '.join(fixed_bug_urls)}
2151
properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)


# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
2164
# read revision objects
2165
_make_revision_objects,
2166
# filter on log messages
2167
_make_search_filter,
2168
# generate deltas for things we will show